..., rendering affected code more efficient and smaller.
Note that in atomic.h this also does away with the redundant separate output
and input specifications of the memory location touched.
Signed-off-by: Jan Beulich <jbeulich@suse.com>
Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
Acked-by: Kevin Tian <kevin.tian@intel.com>
*/
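As background, a minimal standalone sketch (not part of the patch) of the
mechanism being used: gcc 6 and newer predefine __GCC_ASM_FLAG_OUTPUTS__ on
x86, and an asm() output constraint of the form "=@cc<cond>" binds a C
variable directly to the named EFLAGS condition, making the explicit SETcc
of the fallback path unnecessary.

/* Illustrative sketch only -- plain bool instead of Xen's bool_t. */
#include <stdbool.h>

static bool is_zero(unsigned int x)
{
    bool z;

#ifdef __GCC_ASM_FLAG_OUTPUTS__
    /* ZF is read straight out of EFLAGS via the "=@ccz" output. */
    asm ( "test %1,%1" : "=@ccz" (z) : "r" (x) );
#else
    /* Pre-gcc6 fallback: materialize ZF with an explicit SETcc. */
    asm ( "test %1,%1; setz %0" : "=qm" (z) : "r" (x) );
#endif

    return z;
}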
static bool_t even_parity(uint8_t v)
{
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm ( "test %1,%1" : "=@ccp" (v) : "q" (v) );
+#else
asm ( "test %1,%1; setp %0" : "=qm" (v) : "q" (v) );
+#endif
+
return v;
}
static bool_t mul_dbl(unsigned long m[2])
{
bool_t rc;
+
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm ( "mul %1" : "+a" (m[0]), "+d" (m[1]), "=@cco" (rc) );
+#else
asm ( "mul %1; seto %2"
: "+a" (m[0]), "+d" (m[1]), "=qm" (rc) );
+#endif
+
return rc;
}
static bool_t imul_dbl(unsigned long m[2])
{
bool_t rc;
+
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm ( "imul %1" : "+a" (m[0]), "+d" (m[1]), "=@cco" (rc) );
+#else
asm ( "imul %1; seto %2"
: "+a" (m[0]), "+d" (m[1]), "=qm" (rc) );
+#endif
+
return rc;
}
case 0xbc: /* bsf or tzcnt */ {
bool_t zf;
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm ( "bsf %2,%0"
+ : "=r" (dst.val), "=@ccz" (zf)
+ : "rm" (src.val) );
+#else
asm ( "bsf %2,%0; setz %1"
: "=r" (dst.val), "=qm" (zf)
: "rm" (src.val) );
+#endif
_regs.eflags &= ~EFLG_ZF;
if ( (vex.pfx == vex_f3) && vcpu_has_bmi1() )
{
case 0xbd: /* bsr or lzcnt */ {
bool_t zf;
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm ( "bsr %2,%0"
+ : "=r" (dst.val), "=@ccz" (zf)
+ : "rm" (src.val) );
+#else
asm ( "bsr %2,%0; setz %1"
: "=r" (dst.val), "=qm" (zf)
: "rm" (src.val) );
+#endif
_regs.eflags &= ~EFLG_ZF;
if ( (vex.pfx == vex_f3) && vcpu_has_lzcnt() )
{
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
- unsigned char c;
+ bool_t c;
+
- asm volatile (
- "lock; subl %2,%0; sete %1"
- : "=m" (*(volatile int *)&v->counter), "=qm" (c)
- : "ir" (i), "m" (*(volatile int *)&v->counter) : "memory" );
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "lock; subl %2,%0"
+ : "+m" (*(volatile int *)&v->counter), "=@ccz" (c)
+ : "ir" (i) : "memory" );
+#else
+ asm volatile ( "lock; subl %2,%0; setz %1"
+ : "+m" (*(volatile int *)&v->counter), "=qm" (c)
+ : "ir" (i) : "memory" );
+#endif
return c;
}
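The constraint cleanup the description refers to can be seen in isolation in
the following sketch ('counter' is a made-up variable, not from the patch):
the old idiom named the same memory location twice, as a "=m" output and
again as an "m" input, while "+m" declares it once as a read-modify-write
operand.

/* Illustrative sketch only. */
static int counter;

static void old_style(int i)
{
    /* The location appears twice: as output %0 and as input %2 (unused
     * in the template, but needed to tell gcc the memory is also read). */
    asm volatile ( "lock; subl %1,%0"
                   : "=m" (counter) : "ir" (i), "m" (counter) : "memory" );
}

static void new_style(int i)
{
    /* "+m" makes %0 a single read-modify-write memory operand. */
    asm volatile ( "lock; subl %1,%0"
                   : "+m" (counter) : "ir" (i) : "memory" );
}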
static inline int atomic_inc_and_test(atomic_t *v)
{
- unsigned char c;
+ bool_t c;
- asm volatile (
- "lock; incl %0; sete %1"
- : "=m" (*(volatile int *)&v->counter), "=qm" (c)
- : "m" (*(volatile int *)&v->counter) : "memory" );
- return c != 0;
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "lock; incl %0"
+ : "+m" (*(volatile int *)&v->counter), "=@ccz" (c)
+ :: "memory" );
+#else
+ asm volatile ( "lock; incl %0; setz %1"
+ : "+m" (*(volatile int *)&v->counter), "=qm" (c)
+ :: "memory" );
+#endif
+
+ return c;
}
static inline void atomic_dec(atomic_t *v)
static inline int atomic_dec_and_test(atomic_t *v)
{
- unsigned char c;
+ bool_t c;
- asm volatile (
- "lock; decl %0; sete %1"
- : "=m" (*(volatile int *)&v->counter), "=qm" (c)
- : "m" (*(volatile int *)&v->counter) : "memory" );
- return c != 0;
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "lock; decl %0"
+ : "+m" (*(volatile int *)&v->counter), "=@ccz" (c)
+ :: "memory" );
+#else
+ asm volatile ( "lock; decl %0; setz %1"
+ : "+m" (*(volatile int *)&v->counter), "=qm" (c)
+ :: "memory" );
+#endif
+
+ return c;
}
static inline int atomic_add_negative(int i, atomic_t *v)
{
- unsigned char c;
+ bool_t c;
+
- asm volatile (
- "lock; addl %2,%0; sets %1"
- : "=m" (*(volatile int *)&v->counter), "=qm" (c)
- : "ir" (i), "m" (*(volatile int *)&v->counter) : "memory" );
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "lock; addl %2,%0"
+ : "+m" (*(volatile int *)&v->counter), "=@ccs" (c)
+ : "ir" (i) : "memory" );
+#else
+ asm volatile ( "lock; addl %2,%0; sets %1"
+ : "+m" (*(volatile int *)&v->counter), "=qm" (c)
+ : "ir" (i) : "memory" );
+#endif
return c;
}
{
int oldbit;
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "lock; btsl %2,%1"
+ : "=@ccc" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory" );
+#else
asm volatile ( "lock; btsl %2,%1\n\tsbbl %0,%0"
- : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory");
+ : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory" );
+#endif
+
return oldbit;
}
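One nuance of this conversion, shown in a standalone sketch (illustrative
functions, not from the patch): the sbbl %0,%0 fallback smears CF across the
register and so yields 0 or -1, whereas the "=@ccc" output yields 0 or 1.
Both are fine here, since callers of these helpers only test the result for
zero versus non-zero.

/* Illustrative sketch only: two ways to materialize CF after a compare. */
static int cf_via_sbb(unsigned int x, unsigned int y)
{
    int cf;

    /* CF is set iff x < y; sbb %0,%0 turns it into 0 or -1. */
    asm ( "cmp %2,%1\n\tsbbl %0,%0" : "=r" (cf) : "r" (x), "r" (y) );

    return cf;
}

#ifdef __GCC_ASM_FLAG_OUTPUTS__
static int cf_via_flag_output(unsigned int x, unsigned int y)
{
    int cf;

    /* "=@ccc" reads CF directly, yielding 0 or 1. */
    asm ( "cmp %2,%1" : "=@ccc" (cf) : "r" (x), "r" (y) );

    return cf;
}
#endif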
#define test_and_set_bit(nr, addr) ({ \
{
int oldbit;
- asm volatile (
- "btsl %2,%1\n\tsbbl %0,%0"
- : "=r" (oldbit), "+m" (*(int *)addr)
- : "Ir" (nr) : "memory" );
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "btsl %2,%1"
+ : "=@ccc" (oldbit), "+m" (*(int *)addr)
+ : "Ir" (nr) : "memory" );
+#else
+ asm volatile ( "btsl %2,%1\n\tsbbl %0,%0"
+ : "=r" (oldbit), "+m" (*(int *)addr)
+ : "Ir" (nr) : "memory" );
+#endif
+
return oldbit;
}
#define __test_and_set_bit(nr, addr) ({ \
{
int oldbit;
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "lock; btrl %2,%1"
+ : "=@ccc" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory" );
+#else
asm volatile ( "lock; btrl %2,%1\n\tsbbl %0,%0"
- : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory");
+ : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory" );
+#endif
+
return oldbit;
}
#define test_and_clear_bit(nr, addr) ({ \
{
int oldbit;
- asm volatile (
- "btrl %2,%1\n\tsbbl %0,%0"
- : "=r" (oldbit), "+m" (*(int *)addr)
- : "Ir" (nr) : "memory" );
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "btrl %2,%1"
+ : "=@ccc" (oldbit), "+m" (*(int *)addr)
+ : "Ir" (nr) : "memory" );
+#else
+ asm volatile ( "btrl %2,%1\n\tsbbl %0,%0"
+ : "=r" (oldbit), "+m" (*(int *)addr)
+ : "Ir" (nr) : "memory" );
+#endif
+
return oldbit;
}
#define __test_and_clear_bit(nr, addr) ({ \
{
int oldbit;
- asm volatile (
- "btcl %2,%1\n\tsbbl %0,%0"
- : "=r" (oldbit), "+m" (*(int *)addr)
- : "Ir" (nr) : "memory" );
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "btcl %2,%1"
+ : "=@ccc" (oldbit), "+m" (*(int *)addr)
+ : "Ir" (nr) : "memory" );
+#else
+ asm volatile ( "btcl %2,%1\n\tsbbl %0,%0"
+ : "=r" (oldbit), "+m" (*(int *)addr)
+ : "Ir" (nr) : "memory" );
+#endif
+
return oldbit;
}
#define __test_and_change_bit(nr, addr) ({ \
{
int oldbit;
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "lock; btcl %2,%1"
+ : "=@ccc" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory" );
+#else
asm volatile ( "lock; btcl %2,%1\n\tsbbl %0,%0"
- : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory");
+ : "=r" (oldbit), "+m" (ADDR) : "Ir" (nr) : "memory" );
+#endif
+
return oldbit;
}
#define test_and_change_bit(nr, addr) ({ \
{
int oldbit;
- asm volatile (
- "btl %2,%1\n\tsbbl %0,%0"
- : "=r" (oldbit)
- : "m" (CONST_ADDR), "Ir" (nr) : "memory" );
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ asm volatile ( "btl %2,%1"
+ : "=@ccc" (oldbit)
+ : "m" (CONST_ADDR), "Ir" (nr) : "memory" );
+#else
+ asm volatile ( "btl %2,%1\n\tsbbl %0,%0"
+ : "=r" (oldbit)
+ : "m" (CONST_ADDR), "Ir" (nr) : "memory" );
+#endif
+
return oldbit;
}
VMREAD_OPCODE MODRM_EAX_ECX
#endif
/* CF==1 or ZF==1 --> rc = 0 */
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+ : "=@ccnbe" (okay),
+#else
"setnbe %0"
+ : "=qm" (okay),
+#endif
#ifdef HAVE_GAS_VMX
- : "=qm" (okay), "=rm" (*value)
+ "=rm" (*value)
: "r" (field));
#else
- : "=qm" (okay), "=c" (*value)
+ "=c" (*value)
: "a" (field));
#endif
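For reference, the condition code used above, spelled out in a standalone
sketch (made-up function, not from the patch): "nbe" is "not below or
equal", i.e. CF==0 and ZF==0, so okay becomes 0 exactly when the comment's
failure condition (CF==1 or ZF==1) holds.

/* Illustrative sketch only. */
static _Bool above(unsigned int a, unsigned int b)
{
    _Bool ok;

#ifdef __GCC_ASM_FLAG_OUTPUTS__
    /* ok = 1 iff neither CF nor ZF is set after the compare, i.e. a > b. */
    asm ( "cmp %2,%1" : "=@ccnbe" (ok) : "r" (a), "r" (b) );
#else
    asm ( "cmp %2,%1; setnbe %0" : "=qm" (ok) : "r" (a), "r" (b) );
#endif

    return ok;
}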