ia64/xen-unstable

view xen/include/asm-x86/spinlock.h @ 19848:5839491bbf20

[IA64] replace MAX_VCPUS with d->max_vcpus where necessary.

Don't use MAX_VCPUS; use d->max_vcpus instead.
Changeset 2f9e1348aa98 introduced max_vcpus to allow more vcpus
per guest. This patch is the ia64 counterpart.

Signed-off-by: Isaku Yamahata <yamahata@valinux.co.jp>
author Isaku Yamahata <yamahata@valinux.co.jp>
date Mon Jun 29 11:26:05 2009 +0900 (2009-06-29)
parents f210a633571c
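
As an illustration of the pattern described above (a hypothetical helper, not
part of this changeset), per-domain vcpu loops are bounded by d->max_vcpus
instead of the global MAX_VCPUS constant:

/* Hypothetical example -- not from this changeset. */
static void example_walk_vcpus(struct domain *d)
{
    unsigned int i;

    /* was: for ( i = 0; i < MAX_VCPUS; i++ ) */
    for ( i = 0; i < d->max_vcpus; i++ )
    {
        if ( d->vcpu[i] == NULL )
            continue;
        /* ... operate on d->vcpu[i] ... */
    }
}
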
#ifndef __ASM_SPINLOCK_H
#define __ASM_SPINLOCK_H

#include <xen/config.h>
#include <xen/lib.h>
#include <asm/atomic.h>

typedef struct {
    volatile s16 lock;  /* 1: unlocked; 0 (or negative): locked */
} raw_spinlock_t;

#define _RAW_SPIN_LOCK_UNLOCKED /*(raw_spinlock_t)*/ { 1 }

#define _raw_spin_is_locked(x) ((x)->lock <= 0)

static always_inline void _raw_spin_unlock(raw_spinlock_t *lock)
{
    ASSERT(_raw_spin_is_locked(lock));
    /* Release: a plain word store of 1 marks the lock free again. */
    asm volatile (
        "movw $1,%0"
        : "=m" (lock->lock) : : "memory" );
}

static always_inline int _raw_spin_trylock(raw_spinlock_t *lock)
{
    s16 oldval;

    /* Atomically exchange 0 into the lock word; an old value > 0
     * means the lock was free and is now ours. */
    asm volatile (
        "xchgw %w0,%1"
        : "=r" (oldval), "=m" (lock->lock)
        : "0" (0) : "memory" );

    return (oldval > 0);
}

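/*
 * Illustrative sketch only (not part of this header): there is no
 * _raw_spin_lock() here -- a blocking acquire can be built on top of
 * the trylock primitive, along the lines of the hypothetical helper
 * below (assumes a cpu_relax() pause primitive is available).
 */
#if 0
static inline void example_spin_lock(raw_spinlock_t *lock)
{
    while ( !_raw_spin_trylock(lock) )
        while ( _raw_spin_is_locked(lock) )
            cpu_relax();
}
#endif
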
typedef struct {
    volatile int lock;
} raw_rwlock_t;

#define RW_LOCK_BIAS 0x01000000
#define _RAW_RW_LOCK_UNLOCKED /*(raw_rwlock_t)*/ { RW_LOCK_BIAS }

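/*
 * Note on the bias scheme: the lock word starts at RW_LOCK_BIAS.  Each
 * reader atomically subtracts 1, so with N readers and no writer the
 * value is RW_LOCK_BIAS - N (still positive).  A writer subtracts the
 * whole bias and succeeds only if the result is exactly 0, i.e. there
 * were no readers and no other writer; e.g. with one reader present a
 * write attempt sees -1 and must back off.  Hence lock <= 0 means
 * write-locked and lock < RW_LOCK_BIAS means held in some fashion.
 */
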
static always_inline void _raw_read_lock(raw_rwlock_t *rw)
{
    /* Take a reader slot: decrement the count.  If the result is
     * negative a writer holds the lock, so give the slot back and
     * spin until the count is positive again, then retry. */
    asm volatile (
        "1:  lock; decl %0         \n"
        "    jns 3f                \n"
        "    lock; incl %0         \n"
        "2:  rep; nop              \n"
        "    cmpl $1,%0            \n"
        "    js 2b                 \n"
        "    jmp 1b                \n"
        "3:"
        : "=m" (rw->lock) : : "memory" );
}

static always_inline void _raw_write_lock(raw_rwlock_t *rw)
{
    /* Claim the whole bias.  A zero result means there were no
     * readers and no writer; otherwise restore the bias and spin
     * until the lock returns to its unlocked value, then retry. */
    asm volatile (
        "1:  lock; subl %1,%0      \n"
        "    jz 3f                 \n"
        "    lock; addl %1,%0      \n"
        "2:  rep; nop              \n"
        "    cmpl %1,%0            \n"
        "    jne 2b                \n"
        "    jmp 1b                \n"
        "3:"
        : "=m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory" );
}

static always_inline int _raw_write_trylock(raw_rwlock_t *rw)
{
    int rc;

    /* One-shot write lock: rc starts at 1 and is decremented to 0
     * if the bias could not be claimed (lock busy). */
    asm volatile (
        "    lock; subl %2,%0      \n"
        "    jz 1f                 \n"
        "    lock; addl %2,%0      \n"
        "    dec %1                \n"
        "1:"
        : "=m" (rw->lock), "=r" (rc) : "i" (RW_LOCK_BIAS), "1" (1)
        : "memory" );

    return rc;
}

static always_inline void _raw_read_unlock(raw_rwlock_t *rw)
{
    /* Drop our reader slot. */
    asm volatile (
        "lock ; incl %0"
        : "=m" ((rw)->lock) : : "memory" );
}

static always_inline void _raw_write_unlock(raw_rwlock_t *rw)
{
    /* Return the full bias claimed by _raw_write_lock(). */
    asm volatile (
        "lock ; addl %1,%0"
        : "=m" ((rw)->lock) : "i" (RW_LOCK_BIAS) : "memory" );
}

#define _raw_rw_is_locked(x) ((x)->lock < RW_LOCK_BIAS)
#define _raw_rw_is_write_locked(x) ((x)->lock <= 0)

#endif /* __ASM_SPINLOCK_H */