/*
 * Acquire @lock for reading; spins while a writer holds the lock.
 * The old arch-level _raw_read_lock() is replaced by a trylock loop so
 * the slow path spins on a plain read (cpu_relax) rather than repeated
 * atomic operations.
 */
void _read_lock(rwlock_t *lock)
{
check_lock(&lock->debug);
- _raw_read_lock(&lock->raw);
+ /* Failed trylock: wait until no writer is visible, then retry. */
+ while ( unlikely(!_raw_read_trylock(&lock->raw)) )
+ {
+ while ( likely(_raw_rw_is_write_locked(&lock->raw)) )
+ cpu_relax();
+ }
preempt_disable();
}
ASSERT(local_irq_is_enabled());
local_irq_disable();
check_lock(&lock->debug);
- _raw_read_lock(&lock->raw);
+ while ( unlikely(!_raw_read_trylock(&lock->raw)) )
+ {
+ local_irq_enable();
+ while ( likely(_raw_rw_is_write_locked(&lock->raw)) )
+ cpu_relax();
+ local_irq_disable();
+ }
preempt_disable();
}
unsigned long flags;
local_irq_save(flags);
check_lock(&lock->debug);
- _raw_read_lock(&lock->raw);
+ while ( unlikely(!_raw_read_trylock(&lock->raw)) )
+ {
+ local_irq_restore(flags);
+ while ( likely(_raw_rw_is_write_locked(&lock->raw)) )
+ cpu_relax();
+ local_irq_save(flags);
+ }
preempt_disable();
return flags;
}
+/*
+ * Try to acquire @lock for reading without spinning.
+ * Returns 1 on success (with preemption disabled), 0 if the lock could
+ * not be taken immediately.
+ */
+int _read_trylock(rwlock_t *lock)
+{
+ check_lock(&lock->debug);
+ if ( !_raw_read_trylock(&lock->raw) )
+ return 0;
+ preempt_disable();
+ return 1;
+}
+
void _read_unlock(rwlock_t *lock)
{
preempt_enable();
/*
 * Acquire @lock for writing; spins while any reader or writer holds it.
 * Mirrors _read_lock(): trylock fast path, plain-read spin on failure.
 */
void _write_lock(rwlock_t *lock)
{
check_lock(&lock->debug);
- _raw_write_lock(&lock->raw);
+ /* Failed trylock: wait until the lock looks free, then retry. */
+ while ( unlikely(!_raw_write_trylock(&lock->raw)) )
+ {
+ while ( likely(_raw_rw_is_locked(&lock->raw)) )
+ cpu_relax();
+ }
preempt_disable();
}
ASSERT(local_irq_is_enabled());
local_irq_disable();
check_lock(&lock->debug);
- _raw_write_lock(&lock->raw);
+ while ( unlikely(!_raw_write_trylock(&lock->raw)) )
+ {
+ local_irq_enable();
+ while ( likely(_raw_rw_is_locked(&lock->raw)) )
+ cpu_relax();
+ local_irq_disable();
+ }
preempt_disable();
}
unsigned long flags;
local_irq_save(flags);
check_lock(&lock->debug);
- _raw_write_lock(&lock->raw);
+ while ( unlikely(!_raw_write_trylock(&lock->raw)) )
+ {
+ local_irq_restore(flags);
+ while ( likely(_raw_rw_is_locked(&lock->raw)) )
+ cpu_relax();
+ local_irq_save(flags);
+ }
preempt_disable();
return flags;
}
} raw_rwlock_t;
#define _RAW_RW_LOCK_UNLOCKED /*(raw_rwlock_t)*/ { 0, 0 }
-#define _raw_read_lock(rw) \
-do { \
- raw_rwlock_t *__read_lock_ptr = (rw); \
- \
- while (unlikely(ia64_fetchadd(1, (int *) __read_lock_ptr, acq) < 0)) { \
- ia64_fetchadd(-1, (int *) __read_lock_ptr, rel); \
- while (*(volatile int *)__read_lock_ptr < 0) \
- cpu_relax(); \
- } \
-} while (0)
-
#define _raw_read_unlock(rw) \
do { \
raw_rwlock_t *__read_lock_ptr = (rw); \
} while (0)
#ifdef ASM_SUPPORTED
-#define _raw_write_lock(rw) \
-do { \
- __asm__ __volatile__ ( \
- "mov ar.ccv = r0\n" \
- "dep r29 = -1, r0, 31, 1;;\n" \
- "1:\n" \
- "ld4 r2 = [%0];;\n" \
- "cmp4.eq p0,p7 = r0,r2\n" \
- "(p7) br.cond.spnt.few 1b \n" \
- "cmpxchg4.acq r2 = [%0], r29, ar.ccv;;\n" \
- "cmp4.eq p0,p7 = r0, r2\n" \
- "(p7) br.cond.spnt.few 1b;;\n" \
- :: "r"(rw) : "ar.ccv", "p7", "r2", "r29", "memory"); \
-} while(0)
#define _raw_write_trylock(rw) \
({ \
#else /* !ASM_SUPPORTED */
-#define _raw_write_lock(l) \
-({ \
- __u64 ia64_val, ia64_set_val = ia64_dep_mi(-1, 0, 31, 1); \
- __u32 *ia64_write_lock_ptr = (__u32 *) (l); \
- do { \
- while (*ia64_write_lock_ptr) \
- ia64_barrier(); \
- ia64_val = ia64_cmpxchg4_acq(ia64_write_lock_ptr, ia64_set_val, 0); \
- } while (ia64_val); \
-})
#define _raw_write_trylock(rw) \
({ \
volatile int lock;
} raw_rwlock_t;
-#define RW_LOCK_BIAS 0x01000000
-#define _RAW_RW_LOCK_UNLOCKED /*(raw_rwlock_t)*/ { RW_LOCK_BIAS }
+#define RW_WRITE_BIAS 0x7fffffff
+#define _RAW_RW_LOCK_UNLOCKED /*(raw_rwlock_t)*/ { 0 }
+/*
+ * Read-trylock for the rebiased lock word: each reader decrements
+ * ->lock (so readers drive it negative); a write holder sets it to
+ * RW_WRITE_BIAS (positive).  A non-negative result after the decrement
+ * therefore means a writer is present and the decrement must be undone.
+ */
-static always_inline void _raw_read_lock(raw_rwlock_t *rw)
+static always_inline int _raw_read_trylock(raw_rwlock_t *rw)
{
- asm volatile (
- "1: lock; decl %0 \n"
- " jns 3f \n"
- " lock; incl %0 \n"
- "2: rep; nop \n"
- " cmpl $1,%0 \n"
- " js 2b \n"
- " jmp 1b \n"
- "3:"
- : "=m" (rw->lock) : : "memory" );
-}
+ /* Starts at 1 (matching constraint "1"(1)); the out-of-line failure
+ * path in .subsection 1 decrements it to 0. */
+ bool_t acquired;
-static always_inline void _raw_write_lock(raw_rwlock_t *rw)
-{
asm volatile (
- "1: lock; subl %1,%0 \n"
- " jz 3f \n"
- " lock; addl %1,%0 \n"
- "2: rep; nop \n"
- " cmpl %1,%0 \n"
- " jne 2b \n"
+ " lock; decl %0 \n"
+ " jns 2f \n"
+ "1: .subsection 1 \n"
+ "2: lock; incl %0 \n"
+ " dec %1 \n"
" jmp 1b \n"
- "3:"
- : "=m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory" );
+ " .subsection 0 \n"
+ : "=m" (rw->lock), "=r" (acquired) : "1" (1) : "memory" );
+
+ return acquired;
}
/*
 * Write-trylock: atomically swing ->lock from 0 (completely free) to
 * RW_WRITE_BIAS.  Fails (returns 0) if any reader or writer is present.
 * The cmpxchg form replaces the old sub/add dance on RW_LOCK_BIAS.
 */
static always_inline int _raw_write_trylock(raw_rwlock_t *rw)
{
- int rc;
-
- asm volatile (
- " lock; subl %2,%0 \n"
- " jz 1f \n"
- " lock; addl %2,%0 \n"
- " dec %1 \n"
- "1:"
- : "=m" (rw->lock), "=r" (rc) : "i" (RW_LOCK_BIAS), "1" (1)
- : "memory" );
-
- return rc;
+ return (cmpxchg(&rw->lock, 0, RW_WRITE_BIAS) == 0);
}
static always_inline void _raw_read_unlock(raw_rwlock_t *rw)
/*
 * Release a write lock: subtract RW_WRITE_BIAS, returning ->lock to 0
 * (inverse of the cmpxchg in _raw_write_trylock; the old code added
 * RW_LOCK_BIAS back instead).
 */
static always_inline void _raw_write_unlock(raw_rwlock_t *rw)
{
asm volatile (
- "lock ; addl %1,%0"
- : "=m" ((rw)->lock) : "i" (RW_LOCK_BIAS) : "memory" );
+ "lock ; subl %1,%0"
+ : "=m" ((rw)->lock) : "i" (RW_WRITE_BIAS) : "memory" );
}
-#define _raw_rw_is_locked(x) ((x)->lock < RW_LOCK_BIAS)
-#define _raw_rw_is_write_locked(x) ((x)->lock <= 0)
+#define _raw_rw_is_locked(x) ((x)->lock != 0)
+#define _raw_rw_is_write_locked(x) ((x)->lock > 0)
#endif /* __ASM_SPINLOCK_H */
void _read_unlock(rwlock_t *lock);
void _read_unlock_irq(rwlock_t *lock);
void _read_unlock_irqrestore(rwlock_t *lock, unsigned long flags);
+int _read_trylock(rwlock_t *lock);
void _write_lock(rwlock_t *lock);
void _write_lock_irq(rwlock_t *lock);
#define read_unlock(l) _read_unlock(l)
#define read_unlock_irq(l) _read_unlock_irq(l)
#define read_unlock_irqrestore(l, f) _read_unlock_irqrestore(l, f)
+#define read_trylock(l) _read_trylock(l)
#define write_lock(l) _write_lock(l)
#define write_lock_irq(l) _write_lock_irq(l)