ia64/xen-unstable

changeset 11438:698eb277331c

[XEN] Fix bitops inline asm to specify that memory is clobbered.
Necessary because the modified word may not be the one directly
addressed by the memory parameter (since the parameter actually
points at an array, not a scalar value).

The change to set_bit/__set_bit fixes an issue observed
on x460 hardware; the issue was reported (and this fix suggested) by
Ryan Harper <ryanh@us.ibm.com>.

Signed-off-by: Keir Fraser <keir@xensource.com>
author kfraser@ubuntu.eng.hq.xensource.com
date Tue Sep 05 18:28:27 2006 -0700 (2006-09-05)
parents 66dd34f2f439
children 383bc7c7b19e
files xen/include/asm-x86/bitops.h
line diff
     1.1 --- a/xen/include/asm-x86/bitops.h	Tue Sep 05 12:20:31 2006 -0700
     1.2 +++ b/xen/include/asm-x86/bitops.h	Tue Sep 05 18:28:27 2006 -0700
     1.3 @@ -7,20 +7,19 @@
     1.4  
     1.5  #include <xen/config.h>
     1.6  
     1.7 -/*
     1.8 - * These have to be done with inline assembly: that way the bit-setting
     1.9 - * is guaranteed to be atomic. All bit operations return 0 if the bit
    1.10 - * was cleared before the operation and != 0 if it was not.
    1.11 - *
    1.12 - * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
    1.13 - */
    1.14 -
    1.15  #ifdef CONFIG_SMP
    1.16  #define LOCK_PREFIX "lock ; "
    1.17  #else
    1.18  #define LOCK_PREFIX ""
    1.19  #endif
    1.20  
    1.21 +/*
    1.22 + * We use the "+m" constraint because the memory operand is both read from
    1.23 + * and written to. Since the operand is in fact a word array, we also
    1.24 + * specify "memory" in the clobbers list to indicate that words other than
    1.25 + * the one directly addressed by the memory operand may be modified.
    1.26 + */
    1.27 +
    1.28  #define ADDR (*(volatile long *) addr)
    1.29  
    1.30  /**
    1.31 @@ -37,8 +36,8 @@ static __inline__ void set_bit(int nr, v
    1.32  {
    1.33  	__asm__ __volatile__( LOCK_PREFIX
    1.34  		"btsl %1,%0"
    1.35 -		:"=m" (ADDR)
    1.36 -		:"dIr" (nr));
    1.37 +		:"+m" (ADDR)
    1.38 +		:"dIr" (nr) : "memory");
    1.39  }
    1.40  
    1.41  /**
    1.42 @@ -54,8 +53,8 @@ static __inline__ void __set_bit(int nr,
    1.43  {
    1.44  	__asm__(
    1.45  		"btsl %1,%0"
    1.46 -		:"=m" (ADDR)
    1.47 -		:"dIr" (nr));
    1.48 +		:"+m" (ADDR)
    1.49 +		:"dIr" (nr) : "memory");
    1.50  }
    1.51  
    1.52  /**
    1.53 @@ -72,8 +71,8 @@ static __inline__ void clear_bit(int nr,
    1.54  {
    1.55  	__asm__ __volatile__( LOCK_PREFIX
    1.56  		"btrl %1,%0"
    1.57 -		:"=m" (ADDR)
    1.58 -		:"dIr" (nr));
    1.59 +		:"+m" (ADDR)
    1.60 +		:"dIr" (nr) : "memory");
    1.61  }
    1.62  
    1.63  /**
    1.64 @@ -89,8 +88,8 @@ static __inline__ void __clear_bit(int n
    1.65  {
    1.66  	__asm__(
    1.67  		"btrl %1,%0"
    1.68 -		:"=m" (ADDR)
    1.69 -		:"dIr" (nr));
    1.70 +		:"+m" (ADDR)
    1.71 +		:"dIr" (nr) : "memory");
    1.72  }
    1.73  
    1.74  #define smp_mb__before_clear_bit()	barrier()
    1.75 @@ -109,8 +108,8 @@ static __inline__ void __change_bit(int 
    1.76  {
    1.77  	__asm__ __volatile__(
    1.78  		"btcl %1,%0"
    1.79 -		:"=m" (ADDR)
    1.80 -		:"dIr" (nr));
    1.81 +		:"+m" (ADDR)
    1.82 +		:"dIr" (nr) : "memory");
    1.83  }
    1.84  
    1.85  /**
    1.86 @@ -126,8 +125,8 @@ static __inline__ void change_bit(int nr
    1.87  {
    1.88  	__asm__ __volatile__( LOCK_PREFIX
    1.89  		"btcl %1,%0"
    1.90 -		:"=m" (ADDR)
    1.91 -		:"dIr" (nr));
    1.92 +		:"+m" (ADDR)
    1.93 +		:"dIr" (nr) : "memory");
    1.94  }
    1.95  
    1.96  /**
    1.97 @@ -144,7 +143,7 @@ static __inline__ int test_and_set_bit(i
    1.98  
    1.99  	__asm__ __volatile__( LOCK_PREFIX
   1.100  		"btsl %2,%1\n\tsbbl %0,%0"
   1.101 -		:"=r" (oldbit),"=m" (ADDR)
   1.102 +		:"=r" (oldbit),"+m" (ADDR)
   1.103  		:"dIr" (nr) : "memory");
   1.104  	return oldbit;
   1.105  }
   1.106 @@ -164,8 +163,8 @@ static __inline__ int __test_and_set_bit
   1.107  
   1.108  	__asm__(
   1.109  		"btsl %2,%1\n\tsbbl %0,%0"
   1.110 -		:"=r" (oldbit),"=m" (ADDR)
   1.111 -		:"dIr" (nr));
   1.112 +		:"=r" (oldbit),"+m" (ADDR)
   1.113 +		:"dIr" (nr) : "memory");
   1.114  	return oldbit;
   1.115  }
   1.116  
   1.117 @@ -183,7 +182,7 @@ static __inline__ int test_and_clear_bit
   1.118  
   1.119  	__asm__ __volatile__( LOCK_PREFIX
   1.120  		"btrl %2,%1\n\tsbbl %0,%0"
   1.121 -		:"=r" (oldbit),"=m" (ADDR)
   1.122 +		:"=r" (oldbit),"+m" (ADDR)
   1.123  		:"dIr" (nr) : "memory");
   1.124  	return oldbit;
   1.125  }
   1.126 @@ -203,8 +202,8 @@ static __inline__ int __test_and_clear_b
   1.127  
   1.128  	__asm__(
   1.129  		"btrl %2,%1\n\tsbbl %0,%0"
   1.130 -		:"=r" (oldbit),"=m" (ADDR)
   1.131 -		:"dIr" (nr));
   1.132 +		:"=r" (oldbit),"+m" (ADDR)
   1.133 +		:"dIr" (nr) : "memory");
   1.134  	return oldbit;
   1.135  }
   1.136  
   1.137 @@ -215,7 +214,7 @@ static __inline__ int __test_and_change_
   1.138  
   1.139  	__asm__ __volatile__(
   1.140  		"btcl %2,%1\n\tsbbl %0,%0"
   1.141 -		:"=r" (oldbit),"=m" (ADDR)
   1.142 +		:"=r" (oldbit),"+m" (ADDR)
   1.143  		:"dIr" (nr) : "memory");
   1.144  	return oldbit;
   1.145  }
   1.146 @@ -234,7 +233,7 @@ static __inline__ int test_and_change_bi
   1.147  
   1.148  	__asm__ __volatile__( LOCK_PREFIX
   1.149  		"btcl %2,%1\n\tsbbl %0,%0"
   1.150 -		:"=r" (oldbit),"=m" (ADDR)
   1.151 +		:"=r" (oldbit),"+m" (ADDR)
   1.152  		:"dIr" (nr) : "memory");
   1.153  	return oldbit;
   1.154  }
   1.155 @@ -242,7 +241,7 @@ static __inline__ int test_and_change_bi
   1.156  
   1.157  static __inline__ int constant_test_bit(int nr, const volatile void * addr)
   1.158  {
   1.159 -	return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
   1.160 +	return ((1U << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
   1.161  }
   1.162  
   1.163  static __inline__ int variable_test_bit(int nr, volatile void * addr)