
xen/include/asm-x86/spinlock.h @ 18666:c003e5a23a4e

Clean up spinlock operations and compile as first-class functions.

This follows modern Linux, since apparently outlining spinlock
operations does not slow down execution. The cleanups will also allow
more convenient addition of diagnostic code.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author   Keir Fraser <keir.fraser@citrix.com>
date     Mon Oct 20 16:48:17 2008 +0100
parents  4e3316ed1af5
children 7989e3999e83
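
In practice, "first-class functions" means the generic lock/unlock
entry points become real out-of-line functions wrapping the
per-architecture _raw_* primitives defined below, rather than code
expanded inline at every call site. A minimal sketch of such a
wrapper layer, with illustrative names (the real layer is common
code along the lines of xen/common/spinlock.c):

    typedef struct {
        raw_spinlock_t raw;
    } spinlock_t;

    void _spin_lock(spinlock_t *lock)
    {
        _raw_spin_lock(&lock->raw);    /* one out-of-line copy */
    }

    void _spin_unlock(spinlock_t *lock)
    {
        _raw_spin_unlock(&lock->raw);
    }

A single out-of-line copy keeps callers small and gives one central
place to add the diagnostic code the commit message mentions.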
#ifndef __ASM_SPINLOCK_H
#define __ASM_SPINLOCK_H

#include <xen/config.h>
#include <xen/lib.h>
#include <asm/atomic.h>
#include <asm/rwlock.h>

/* lock == 1: free; lock <= 0: held, with waiters decrementing it. */
typedef struct {
    volatile s16 lock;
} raw_spinlock_t;

#define _RAW_SPIN_LOCK_UNLOCKED /*(raw_spinlock_t)*/ { 1 }

#define _raw_spin_is_locked(x) \
    (*(volatile char *)(&(x)->lock) <= 0)

static inline void _raw_spin_lock(raw_spinlock_t *lock)
{
    asm volatile (
        "1:  lock; decb %0         \n" /* 1 -> 0: lock acquired       */
        "    js 2f                 \n" /* result negative: contended  */
        ".section .text.lock,\"ax\"\n" /* slow path kept out of line  */
        "2:  rep; nop              \n" /* i.e. PAUSE, while spinning  */
        "    cmpb $0,%0            \n"
        "    jle 2b                \n" /* wait for the holder's store */
        "    jmp 1b                \n" /* then retry the decrement    */
        ".previous"
        : "=m" (lock->lock) : : "memory" );
}

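/*
 * For reference, a rough C rendering of the protocol above, using GCC
 * __atomic builtins. A sketch only (not how Xen builds this file), so
 * it is compiled out; note the byte counter assumes fewer than 128
 * concurrent waiters.
 */
#if 0
static inline void byte_spin_lock(volatile s8 *l)
{
    for ( ; ; )
    {
        /* "lock; decb": acquired iff the byte went 1 -> 0. */
        if ( __atomic_sub_fetch(l, 1, __ATOMIC_ACQUIRE) == 0 )
            return;
        /* "rep; nop" loop: wait for the holder to store 1 back. */
        while ( __atomic_load_n(l, __ATOMIC_RELAXED) <= 0 )
            cpu_relax();
    }
}
#endif
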
static inline void _raw_spin_unlock(raw_spinlock_t *lock)
{
    ASSERT(_raw_spin_is_locked(lock));
    asm volatile (
        "movb $1,%0"                   /* a plain store releases on x86 */
        : "=m" (lock->lock) : : "memory" );
}

static inline int _raw_spin_trylock(raw_spinlock_t *lock)
{
    char oldval;
    asm volatile (
        "xchgb %b0,%1"                 /* atomically swap in 0 (locked) */
        : "=q" (oldval), "=m" (lock->lock)
        : "0" (0) : "memory" );
    return (oldval > 0);               /* old value was 1 => we got it */
}

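/*
 * Usage sketch (illustrative names, compiled out): trylock succeeds
 * exactly when the old byte value was positive, i.e. the lock was free.
 */
#if 0
static raw_spinlock_t example_lock = _RAW_SPIN_LOCK_UNLOCKED;

static void example(void)
{
    if ( _raw_spin_trylock(&example_lock) )
    {
        /* ... critical section ... */
        _raw_spin_unlock(&example_lock);
    }
    else
    {
        _raw_spin_lock(&example_lock); /* block until the lock is free */
        _raw_spin_unlock(&example_lock);
    }
}
#endif
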
typedef struct {
    volatile unsigned int lock;
} raw_rwlock_t;

#define _RAW_RW_LOCK_UNLOCKED /*(raw_rwlock_t)*/ { RW_LOCK_BIAS }

/*
 * On x86, we implement read-write locks as a 32-bit counter
 * with the high bit (sign) being the "contended" bit.
 */

static inline void _raw_read_lock(raw_rwlock_t *rw)
{
    __build_read_lock(rw, "__read_lock_failed");
}

static inline void _raw_write_lock(raw_rwlock_t *rw)
{
    __build_write_lock(rw, "__write_lock_failed");
}

/* A reader releases by adding back the 1 it subtracted at lock time. */
#define _raw_read_unlock(rw)                        \
    asm volatile (                                  \
        "lock ; incl %0" :                          \
        "=m" ((rw)->lock) : : "memory" )

/* The writer releases by restoring the full bias it subtracted. */
#define _raw_write_unlock(rw)                       \
    asm volatile (                                  \
        "lock ; addl $" RW_LOCK_BIAS_STR ",%0" :    \
        "=m" ((rw)->lock) : : "memory" )

#endif /* __ASM_SPINLOCK_H */