ia64/xen-unstable

view xen/include/asm-x86/x86_32/uaccess.h @ 6552:a9873d384da4

Merge.
author adsharma@los-vmm.sc.intel.com
date Thu Aug 25 12:24:48 2005 -0700 (2005-08-25)
parents 112d44270733 fa0754a9f64f
children dfaf788ab18c
line source
1 #ifndef __i386_UACCESS_H
2 #define __i386_UACCESS_H
4 #define __addr_ok(addr) ((unsigned long)(addr) < HYPERVISOR_VIRT_START)
/*
 * Test whether a block of memory is a valid user space address.
 * Returns 0 if the range is valid, nonzero otherwise.
 *
 * This is equivalent to the following test:
 * (u33)addr + (u33)size >= (u33)HYPERVISOR_VIRT_START
 *
 * How the asm works:
 *   addl %3,%1      - sum = addr + size, CF set on 32-bit wrap-around
 *   sbbl %0,%0      - flag = CF ? ~0UL : 0 (wrap means invalid)
 *   cmpl %1,%4      - compare HYPERVISOR_VIRT_START against sum
 *   sbbl $0,%0      - fold that borrow into flag as well
 * NOTE(review): the comment above says ">=" is invalid, but the carry
 * chain appears to accept sum == HYPERVISOR_VIRT_START (an exclusive
 * end exactly at the boundary) - confirm against callers before
 * relying on the boundary case.
 */
#define __range_not_ok(addr,size) ({ \
	unsigned long flag,sum; \
	asm("addl %3,%1 ; sbbl %0,%0; cmpl %1,%4; sbbl $0,%0" \
		:"=&r" (flag), "=r" (sum) \
		:"1" (addr),"g" ((int)(size)),"r" (HYPERVISOR_VIRT_START)); \
	flag; })

/* Nonzero result from __range_not_ok means the range is NOT ok. */
#define access_ok(addr,size) (likely(__range_not_ok(addr,size) == 0))
/*
 * Validate an array of @count elements of @size bytes each: first make
 * sure count*size cannot overflow an unsigned long, then check the
 * resulting byte range with access_ok().
 *
 * @count and @size are parenthesised so that expression arguments
 * (e.g. "n + 1") expand correctly inside the division and the
 * multiplication.
 */
#define array_access_ok(addr,count,size) \
    (likely((count) < (~0UL/(size))) && access_ok(addr,(count)*(size)))
/*
 * Deliberately left undefined: referencing it causes a link-time error,
 * which catches size mismatches on 64-bit get_user/put_user at build
 * time (the call is emitted only when sizeof(x) != 8, and the compiler
 * eliminates it otherwise).
 */
extern void __uaccess_var_not_u64(void);
/*
 * Store the 64-bit value @x (supplied in %edx:%eax via the "A"
 * constraint) to the possibly-faulting address @addr as two 32-bit
 * moves.  If either store faults, the __ex_table entry redirects
 * execution to the .fixup stub, which loads @errret into @retval and
 * resumes at label 3.
 *
 * Wrapped in do { } while (0) so the multi-statement body behaves as a
 * single statement inside unbraced if/else.  The sizeof() check emits a
 * call to the undefined __uaccess_var_not_u64() on size mismatch,
 * turning the bug into a link error.
 */
#define __put_user_u64(x, addr, retval, errret) \
do { \
	if (sizeof(x) != 8) __uaccess_var_not_u64(); \
	__asm__ __volatile__( \
		"1:	movl %%eax,0(%2)\n" \
		"2:	movl %%edx,4(%2)\n" \
		"3:\n" \
		".section .fixup,\"ax\"\n" \
		"4:	movl %3,%0\n" \
		"	jmp 3b\n" \
		".previous\n" \
		".section __ex_table,\"a\"\n" \
		"	.align 4\n" \
		"	.long 1b,4b\n" \
		"	.long 2b,4b\n" \
		".previous" \
		: "=r"(retval) \
		: "A" (x), "r" (addr), "i"(errret), "0"(retval)); \
} while (0)
/*
 * Size-dispatching store to a guest address.  @retval is cleared first
 * and receives @errret if the access faults.  Sizes 1/2/4 go through
 * __put_user_asm with the matching operand suffix and register
 * constraint; size 8 uses the two-move __put_user_u64 path.  Any other
 * size calls the undefined __put_user_bad() -> link-time error.
 * @ptr is parenthesised inside __typeof__ so expression arguments
 * expand correctly.
 */
#define __put_user_size(x,ptr,size,retval,errret) \
do { \
	retval = 0; \
	switch (size) { \
	case 1: __put_user_asm(x,ptr,retval,"b","b","iq",errret); break; \
	case 2: __put_user_asm(x,ptr,retval,"w","w","ir",errret); break; \
	case 4: __put_user_asm(x,ptr,retval,"l","","ir",errret); break; \
	case 8: __put_user_u64((__typeof__(*(ptr)))(x),ptr,retval,errret); break;\
	default: __put_user_bad(); \
	} \
} while (0)
/*
 * Load a 64-bit value from the possibly-faulting address @addr into @x
 * (%edx:%eax via the "=&A" constraint, early-clobbered because @addr
 * must stay live across the first move).  On a fault the .fixup stub
 * stores @errret in @retval, zeroes both result halves, and resumes at
 * label 3.
 *
 * Wrapped in do { } while (0) so the multi-statement body behaves as a
 * single statement inside unbraced if/else.  The sizeof() check turns a
 * size mismatch into a link error via __uaccess_var_not_u64().
 */
#define __get_user_u64(x, addr, retval, errret) \
do { \
	if (sizeof(x) != 8) __uaccess_var_not_u64(); \
	__asm__ __volatile__( \
		"1:	movl 0(%2),%%eax\n" \
		"2:	movl 4(%2),%%edx\n" \
		"3:\n" \
		".section .fixup,\"ax\"\n" \
		"4:	movl %3,%0\n" \
		"	xorl %%eax,%%eax\n" \
		"	xorl %%edx,%%edx\n" \
		"	jmp 3b\n" \
		".previous\n" \
		".section __ex_table,\"a\"\n" \
		"	.align 4\n" \
		"	.long 1b,4b\n" \
		"	.long 2b,4b\n" \
		".previous" \
		: "=r" (retval), "=&A" (x) \
		: "r" (addr), "i"(errret), "0"(retval)); \
} while (0)
/*
 * Size-dispatching load from a guest address.  @retval is cleared first
 * and receives @errret if the access faults (in which case @x is
 * zeroed by the fixup path).  Sizes 1/2/4 go through __get_user_asm
 * with the matching operand suffix and output constraint; size 8 uses
 * the two-move __get_user_u64 path.  Any other size calls the
 * undefined __get_user_bad() -> link-time error.
 */
#define __get_user_size(x,ptr,size,retval,errret) \
do { \
	retval = 0; \
	switch (size) { \
	case 1: __get_user_asm(x,ptr,retval,"b","b","=q",errret); break; \
	case 2: __get_user_asm(x,ptr,retval,"w","w","=r",errret); break; \
	case 4: __get_user_asm(x,ptr,retval,"l","","=r",errret); break; \
	case 8: __get_user_u64(x,ptr,retval,errret); break; \
	default: (x) = __get_user_bad(); \
	} \
} while (0)