ia64/xen-unstable
changeset 5193:94a63704b6c4
bitkeeper revision 1.1584 (42984f31pkIsDJAxZBlaJgFWP9L2QA)
Assembly code cleanups. gcc doesn't need very many hints to get the
operand size and register names correct for both x86/32 and x86/64.
Signed-off-by: Keir Fraser <keir@xensource.com>
author   | kaf24@firebug.cl.cam.ac.uk
date     | Sat May 28 11:00:01 2005 +0000 (2005-05-28)
parents  | 2df0e546014d
children | 1e610bdd54fc
files    | xen/arch/x86/flushtlb.c xen/arch/x86/vmx_vmcs.c xen/include/asm-x86/bitops.h xen/include/asm-x86/flushtlb.h xen/include/asm-x86/processor.h xen/include/asm-x86/rwlock.h
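
A side note on the commit message's point that gcc needs few hints: when a value is passed through an "r" constraint, gcc substitutes a register of the operand's width (%eax on x86/32, %rax on x86/64 for an unsigned long), and the assembler derives the operand size from that register name, so the explicit "l"/"q" suffix spliced in via __OS is redundant. A minimal standalone sketch (not part of this changeset; the function name is made up) mirroring the bitops.h change:

#include <stdio.h>

/* Hypothetical example: with an "r" constraint gcc picks a register
 * matching the width of 'word', so a bare "bsf" (no "bsfl"/"bsfq"
 * suffix) assembles correctly on both x86/32 and x86/64. */
static inline unsigned long first_set_bit_example(unsigned long word)
{
    __asm__ ( "bsf %1,%0"
              : "=r" (word)
              : "r" (word) );      /* result undefined if word == 0 */
    return word;
}

int main(void)
{
    printf("first set bit of 0x90 is %lu\n", first_set_bit_example(0x90));
    return 0;
}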
line diff
--- a/xen/arch/x86/flushtlb.c	Sat May 28 09:52:29 2005 +0000
+++ b/xen/arch/x86/flushtlb.c	Sat May 28 11:00:01 2005 +0000
@@ -57,7 +57,7 @@ void write_cr3(unsigned long cr3)
      */
 
  skip_clocktick:
-    __asm__ __volatile__ ( "mov"__OS" %0, %%cr3" : : "r" (cr3) : "memory" );
+    __asm__ __volatile__ ( "mov %0, %%cr3" : : "r" (cr3) : "memory" );
 
     /*
      * STEP 3. Update this CPU's timestamp. Note that this happens *after*
--- a/xen/arch/x86/vmx_vmcs.c	Sat May 28 09:52:29 2005 +0000
+++ b/xen/arch/x86/vmx_vmcs.c	Sat May 28 11:00:01 2005 +0000
@@ -316,11 +316,7 @@ construct_init_vmcs_guest(struct cpu_use
     error |= __vmwrite(GUEST_EFLAGS, eflags);
 
     error |= __vmwrite(GUEST_INTERRUPTIBILITY_INFO, 0);
-#ifdef __i386__
     __asm__ __volatile__ ("mov %%dr7, %0\n" : "=r" (dr7));
-#else
-    __asm__ __volatile__ ("movq %%dr7, %0\n" : "=r" (dr7));
-#endif
     error |= __vmwrite(GUEST_DR7, dr7);
     error |= __vmwrite(GUEST_VMCS0, 0xffffffff);
     error |= __vmwrite(GUEST_VMCS1, 0xffffffff);
@@ -356,21 +352,13 @@ static inline int construct_vmcs_host(st
     host_env->idtr_base = desc.address;
     error |= __vmwrite(HOST_IDTR_BASE, host_env->idtr_base);
 
-#ifdef __i386__
-    __asm__ __volatile__ ("movl %%cr0,%0" : "=r" (crn) : );
-#else
-    __asm__ __volatile__ ("movq %%cr0,%0" : "=r" (crn) : );
-#endif
+    __asm__ __volatile__ ("mov %%cr0,%0" : "=r" (crn) : );
 
     host_env->cr0 = crn;
     error |= __vmwrite(HOST_CR0, crn); /* same CR0 */
 
     /* CR3 is set in vmx_final_setup_hostos */
-#ifdef __i386__
-    __asm__ __volatile__ ("movl %%cr4,%0" : "=r" (crn) : );
-#else
-    __asm__ __volatile__ ("movq %%cr4,%0" : "=r" (crn) : );
-#endif
+    __asm__ __volatile__ ("mov %%cr4,%0" : "=r" (crn) : );
     host_env->cr4 = crn;
     error |= __vmwrite(HOST_CR4, crn);
     error |= __vmwrite(HOST_EIP, (unsigned long) vmx_asm_vmexit_handler);
--- a/xen/include/asm-x86/bitops.h	Sat May 28 09:52:29 2005 +0000
+++ b/xen/include/asm-x86/bitops.h	Sat May 28 11:00:01 2005 +0000
@@ -357,7 +357,7 @@ static inline unsigned long __scanbit(un
  */
 static __inline__ unsigned long find_first_clear_bit(unsigned long word)
 {
-        __asm__("bsf"__OS" %1,%0"
+        __asm__("bsf %1,%0"
                 :"=r" (word)
                 :"r" (~word));
         return word;
@@ -365,7 +365,7 @@ static __inline__ unsigned long find_fir
 
 static __inline__ unsigned long find_first_set_bit(unsigned long word)
 {
-        __asm__("bsf"__OS" %1,%0"
+        __asm__("bsf %1,%0"
                 :"=r" (word)
                 :"r" (word));
         return word;
--- a/xen/include/asm-x86/flushtlb.h	Sat May 28 09:52:29 2005 +0000
+++ b/xen/include/asm-x86/flushtlb.h	Sat May 28 11:00:01 2005 +0000
@@ -70,7 +70,7 @@ static inline unsigned long read_cr3(voi
 {
     unsigned long cr3;
     __asm__ __volatile__ (
-        "mov"__OS" %%cr3, %0" : "=r" (cr3) : );
+        "mov %%cr3, %0" : "=r" (cr3) : );
     return cr3;
 }
 
--- a/xen/include/asm-x86/processor.h	Sat May 28 09:52:29 2005 +0000
+++ b/xen/include/asm-x86/processor.h	Sat May 28 11:00:01 2005 +0000
@@ -255,24 +255,24 @@ static inline unsigned int cpuid_edx(uns
 #define read_cr0() ({ \
     unsigned long __dummy; \
     __asm__( \
-        "mov"__OS" %%cr0,%0\n\t" \
+        "mov %%cr0,%0\n\t" \
         :"=r" (__dummy)); \
     __dummy; \
 })
 
 #define write_cr0(x) \
-    __asm__("mov"__OS" %0,%%cr0": :"r" ((unsigned long)x));
+    __asm__("mov %0,%%cr0": :"r" ((unsigned long)x));
 
 #define read_cr4() ({ \
     unsigned long __dummy; \
     __asm__( \
-        "mov"__OS" %%cr4,%0\n\t" \
+        "mov %%cr4,%0\n\t" \
         :"=r" (__dummy)); \
     __dummy; \
 })
 
 #define write_cr4(x) \
-    __asm__("mov"__OS" %0,%%cr4": :"r" ((unsigned long)x));
+    __asm__("mov %0,%%cr4": :"r" ((unsigned long)x));
 
 /*
  * Save the cr4 feature set we're using (ie
@@ -284,22 +284,24 @@ extern unsigned long mmu_cr4_features;
 
 static inline void set_in_cr4 (unsigned long mask)
 {
+    unsigned long dummy;
     mmu_cr4_features |= mask;
-    __asm__("mov"__OS" %%cr4,%%"__OP"ax\n\t"
-            "or"__OS" %0,%%"__OP"ax\n\t"
-            "mov"__OS" %%"__OP"ax,%%cr4\n"
-            : : "irg" (mask)
-            :"ax");
+    __asm__ __volatile__ (
+        "mov %%cr4,%0\n\t"
+        "or %1,%0\n\t"
+        "mov %0,%%cr4\n"
+        : "=&r" (dummy) : "irg" (mask) );
 }
 
 static inline void clear_in_cr4 (unsigned long mask)
 {
+    unsigned long dummy;
     mmu_cr4_features &= ~mask;
-    __asm__("mov"__OS" %%cr4,%%"__OP"ax\n\t"
-            "and"__OS" %0,%%"__OP"ax\n\t"
-            "mov"__OS" %%"__OP"ax,%%cr4\n"
-            : : "irg" (~mask)
-            :"ax");
+    __asm__ __volatile__ (
+        "mov %%cr4,%0\n\t"
+        "and %1,%0\n\t"
+        "mov %0,%%cr4\n"
+        : "=&r" (dummy) : "irg" (~mask) );
 }
 
 /*
--- a/xen/include/asm-x86/rwlock.h	Sat May 28 09:52:29 2005 +0000
+++ b/xen/include/asm-x86/rwlock.h	Sat May 28 11:00:01 2005 +0000
@@ -35,10 +35,10 @@
     "js 2f\n" \
     "1:\n" \
     ".section .text.lock,\"ax\"\n" \
-    "2:\tpush"__OS" %%"__OP"ax\n\t" \
-    "lea"__OS" %0,%%"__OP"ax\n\t" \
+    "2:\tpush %%"__OP"ax\n\t" \
+    "lea %0,%%"__OP"ax\n\t" \
     "call " helper "\n\t" \
-    "pop"__OS" %%"__OP"ax\n\t" \
+    "pop %%"__OP"ax\n\t" \
     "jmp 1b\n" \
     ".previous" \
     :"=m" (*(volatile int *)rw) : : "memory")
@@ -65,10 +65,10 @@
     "jnz 2f\n" \
     "1:\n" \
     ".section .text.lock,\"ax\"\n" \
-    "2:\tpush"__OS" %%"__OP"ax\n\t" \
-    "lea"__OS" %0,%%"__OP"ax\n\t" \
+    "2:\tpush %%"__OP"ax\n\t" \
+    "lea %0,%%"__OP"ax\n\t" \
     "call " helper "\n\t" \
-    "pop"__OS" %%"__OP"ax\n\t" \
+    "pop %%"__OP"ax\n\t" \
     "jmp 1b\n" \
     ".previous" \
     :"=m" (*(volatile int *)rw) : : "memory")
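
One detail worth noting in the rwlock.h hunks above: the __OS size suffixes are dropped, but the __OP register-name prefixes stay. That is because %eax vs. %rax is spelled out literally in the asm template rather than supplied by gcc through a constraint, so the name still has to differ per architecture, while the push/pop/lea operand size follows from that register name. An illustrative standalone sketch (not from the changeset; __OP is defined locally here with the expansion implied by the %%"__OP"ax usage above):

/* Hypothetical illustration: __OP supplies only the register-name
 * prefix ("e" on x86/32, "r" on x86/64), so %%"__OP"ax becomes %eax
 * or %rax; the assembler infers the operand size from that name. */
#if defined(__x86_64__)
#define __OP "r"
#else
#define __OP "e"
#endif

static inline void load_address_example(int *p)
{
    /* gcc substitutes the address of *p for %0; the destination
     * register is spelled literally via __OP, with no size suffix. */
    __asm__ __volatile__ ( "lea %0,%%"__OP"ax"
                           : : "m" (*p) : "ax" );
}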