ia64/xen-unstable

xen/include/asm-x86/string.h @ 19848:5839491bbf20

[IA64] replace MAX_VCPUS with d->max_vcpus where necessary.

Don't use MAX_VCPUS; use vcpu::max_vcpus instead.
Changeset 2f9e1348aa98 introduced max_vcpus to allow more vcpus
per guest. This patch is the ia64 counterpart.

Signed-off-by: Isaku Yamahata <yamahata@valinux.co.jp>
author Isaku Yamahata <yamahata@valinux.co.jp>
date Mon Jun 29 11:26:05 2009 +0900 (2009-06-29)
#ifndef __X86_STRING_H__
#define __X86_STRING_H__

#include <xen/config.h>

static inline void *__variable_memcpy(void *to, const void *from, size_t n)
{
    long d0, d1, d2;
    __asm__ __volatile__ (
        "    rep ; movs"__OS"\n"
        "    mov %4,%3       \n"
        "    rep ; movsb     \n"
        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
        : "0" (n/BYTES_PER_LONG), "r" (n%BYTES_PER_LONG), "1" (to), "2" (from)
        : "memory" );
    return to;
}
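/*
 * Illustrative sketch, not part of the original header: a plain-C
 * equivalent of the inline assembly above. "rep ; movs"__OS copies
 * n/BYTES_PER_LONG machine words, then "rep ; movsb" copies the
 * n%BYTES_PER_LONG leftover bytes. The function name is hypothetical.
 */
static inline void *variable_memcpy_c(void *to, const void *from, size_t n)
{
    unsigned long *dw = to;
    const unsigned long *sw = from;
    size_t words = n / sizeof(unsigned long);
    size_t tail  = n % sizeof(unsigned long);
    unsigned char *db;
    const unsigned char *sb;

    while ( words-- )
        *dw++ = *sw++;          /* word-at-a-time copy (rep ; movs__OS) */

    db = (unsigned char *)dw;
    sb = (const unsigned char *)sw;
    while ( tail-- )
        *db++ = *sb++;          /* byte tail (rep ; movsb) */

    return to;
}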
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static always_inline void *__constant_memcpy(
    void *to, const void *from, size_t n)
{
    switch ( n )
    {
    case 0:
        return to;
    case 1:
        *(u8 *)to = *(const u8 *)from;
        return to;
    case 2:
        *(u16 *)to = *(const u16 *)from;
        return to;
    case 3:
        *(u16 *)to = *(const u16 *)from;
        *(2+(u8 *)to) = *(2+(const u8 *)from);
        return to;
    case 4:
        *(u32 *)to = *(const u32 *)from;
        return to;
    case 5:
        *(u32 *)to = *(const u32 *)from;
        *(4+(u8 *)to) = *(4+(const u8 *)from);
        return to;
    case 6:
        *(u32 *)to = *(const u32 *)from;
        *(2+(u16 *)to) = *(2+(const u16 *)from);
        return to;
    case 7:
        *(u32 *)to = *(const u32 *)from;
        *(2+(u16 *)to) = *(2+(const u16 *)from);
        *(6+(u8 *)to) = *(6+(const u8 *)from);
        return to;
    case 8:
        *(u64 *)to = *(const u64 *)from;
        return to;
    case 12:
        *(u64 *)to = *(const u64 *)from;
        *(2+(u32 *)to) = *(2+(const u32 *)from);
        return to;
    case 16:
        *(u64 *)to = *(const u64 *)from;
        *(1+(u64 *)to) = *(1+(const u64 *)from);
        return to;
    case 20:
        *(u64 *)to = *(const u64 *)from;
        *(1+(u64 *)to) = *(1+(const u64 *)from);
        *(4+(u32 *)to) = *(4+(const u32 *)from);
        return to;
    }
#define COMMON(x)                                       \
    __asm__ __volatile__ (                              \
        "rep ; movs"__OS                                \
        x                                               \
        : "=&c" (d0), "=&D" (d1), "=&S" (d2)            \
        : "0" (n/BYTES_PER_LONG), "1" (to), "2" (from)  \
        : "memory" );
    {
        long d0, d1, d2;
        switch ( n % BYTES_PER_LONG )
        {
        case 0: COMMON(""); return to;
        case 1: COMMON("\n\tmovsb"); return to;
        case 2: COMMON("\n\tmovsw"); return to;
        case 3: COMMON("\n\tmovsw\n\tmovsb"); return to;
        case 4: COMMON("\n\tmovsl"); return to;
        case 5: COMMON("\n\tmovsl\n\tmovsb"); return to;
        case 6: COMMON("\n\tmovsl\n\tmovsw"); return to;
        case 7: COMMON("\n\tmovsl\n\tmovsw\n\tmovsb"); return to;
        }
    }
#undef COMMON
    return to;
}
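/*
 * Illustrative usage, not part of the original header: with a
 * compile-time-constant size, the compiler folds the switch above into
 * direct moves. Copying this hypothetical 16-byte struct, for example,
 * reduces to the two u64 stores of "case 16".
 */
struct two_words { u64 lo, hi; };       /* hypothetical example type */

static inline void copy_two_words(struct two_words *d,
                                  const struct two_words *s)
{
    __constant_memcpy(d, s, sizeof(*d));    /* n == 16, a constant */
}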
#define __HAVE_ARCH_MEMCPY
#define memcpy(t,f,n) (__memcpy((t),(f),(n)))
static always_inline
void *__memcpy(void *t, const void *f, size_t n)
{
    return (__builtin_constant_p(n) ?
            __constant_memcpy((t),(f),(n)) :
            __variable_memcpy((t),(f),(n)));
}
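/*
 * Illustrative usage, not part of the original header: memcpy()
 * dispatches on __builtin_constant_p(n). The two helper names below
 * are hypothetical and exist only to show the two paths.
 */
static inline void copy_header_example(void *dst, const void *src)
{
    memcpy(dst, src, 64);       /* constant n: takes __constant_memcpy() */
}

static inline void copy_payload_example(void *dst, const void *src, size_t len)
{
    memcpy(dst, src, len);      /* variable n: takes __variable_memcpy() */
}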
/* Some versions of gcc don't have this builtin. It's non-critical anyway. */
#define __HAVE_ARCH_MEMMOVE
extern void *memmove(void *dest, const void *src, size_t n);
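/*
 * Illustrative sketch, not part of the original header: memmove() is
 * only declared here and defined elsewhere. A minimal overlap-safe
 * implementation (not necessarily the one Xen uses) copies forward
 * when dest is below src and backward otherwise. The function name is
 * hypothetical.
 */
static inline void *memmove_sketch(void *dest, const void *src, size_t n)
{
    unsigned char *d = dest;
    const unsigned char *s = src;

    if ( d < s )
        while ( n-- )
            *d++ = *s++;        /* forward copy cannot clobber unread src */
    else
        while ( n-- )
            d[n] = s[n];        /* backward copy handles dest above src */

    return dest;
}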
static inline void *__memset_generic(void *s, char c, size_t count)
{
    long d0, d1;
    __asm__ __volatile__ (
        "rep ; stosb"
        : "=&c" (d0), "=&D" (d1) : "a" (c), "1" (s), "0" (count) : "memory" );
    return s;
}
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things a machine word at a time even when we don't know the size of the
 * area at compile-time..
 */
static inline void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
    long d0, d1;
    __asm__ __volatile__(
        "    rep ; stos"__OS"\n"
        "    mov %3,%4       \n"
        "    rep ; stosb     \n"
        : "=&c" (d0), "=&D" (d1)
        : "a" (c), "r" (count%BYTES_PER_LONG),
          "0" (count/BYTES_PER_LONG), "1" (s)
        : "memory" );
    return s;
}
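/*
 * Illustrative sketch, not part of the original header: a plain-C
 * equivalent of __constant_c_memset() above. c already holds the fill
 * byte replicated across a full word (see MEMSET_PATTERN_MUL below),
 * so whole words are stored first and the tail is finished bytewise.
 * The function name is hypothetical.
 */
static inline void *constant_c_memset_c(void *s, unsigned long c, size_t count)
{
    unsigned long *w = s;
    size_t words = count / sizeof(unsigned long);
    size_t tail  = count % sizeof(unsigned long);
    unsigned char *b;

    while ( words-- )
        *w++ = c;                   /* rep ; stos__OS */

    b = (unsigned char *)w;
    while ( tail-- )
        *b++ = (unsigned char)c;    /* rep ; stosb */

    return s;
}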
/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant..
 */
static always_inline void *__constant_c_and_count_memset(
    void *s, unsigned long pattern, size_t count)
{
    switch ( count )
    {
    case 0:
        return s;
    case 1:
        *(u8 *)s = pattern;
        return s;
    case 2:
        *(u16 *)s = pattern;
        return s;
    case 3:
        *(u16 *)s = pattern;
        *(2+(u8 *)s) = pattern;
        return s;
    case 4:
        *(u32 *)s = pattern;
        return s;
    case 5:
        *(u32 *)s = pattern;
        *(4+(u8 *)s) = pattern;
        return s;
    case 6:
        *(u32 *)s = pattern;
        *(2+(u16 *)s) = pattern;
        return s;
    case 7:
        *(u32 *)s = pattern;
        *(2+(u16 *)s) = pattern;
        *(6+(u8 *)s) = pattern;
        return s;
    case 8:
        *(u64 *)s = pattern;
        return s;
    }
#define COMMON(x)                                               \
    __asm__ __volatile__ (                                      \
        "rep ; stos"__OS                                        \
        x                                                       \
        : "=&c" (d0), "=&D" (d1)                                \
        : "a" (pattern), "0" (count/BYTES_PER_LONG), "1" (s)    \
        : "memory" )
    {
        long d0, d1;
        switch ( count % BYTES_PER_LONG )
        {
        case 0: COMMON(""); return s;
        case 1: COMMON("\n\tstosb"); return s;
        case 2: COMMON("\n\tstosw"); return s;
        case 3: COMMON("\n\tstosw\n\tstosb"); return s;
        case 4: COMMON("\n\tstosl"); return s;
        case 5: COMMON("\n\tstosl\n\tstosb"); return s;
        case 6: COMMON("\n\tstosl\n\tstosw"); return s;
        case 7: COMMON("\n\tstosl\n\tstosw\n\tstosb"); return s;
        }
    }
#undef COMMON
    return s;
}
#define __constant_c_x_memset(s, c, count) \
    (__builtin_constant_p(count) ? \
     __constant_c_and_count_memset((s),(c),(count)) : \
     __constant_c_memset((s),(c),(count)))

#define __var_x_memset(s, c, count) \
    (__builtin_constant_p(count) ? \
     __constant_count_memset((s),(c),(count)) : \
     __memset_generic((s),(c),(count)))
#ifdef CONFIG_X86_64
#define MEMSET_PATTERN_MUL 0x0101010101010101UL
#else
#define MEMSET_PATTERN_MUL 0x01010101UL
#endif
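/*
 * Illustrative note, not part of the original header: multiplying a
 * byte by MEMSET_PATTERN_MUL replicates it into every byte of a word,
 * e.g. on x86_64:
 *
 *   0x41UL * 0x0101010101010101UL == 0x4141414141414141UL
 *
 * which is exactly the word-sized pattern "rep ; stos"__OS needs to
 * fill memory with the byte 0x41 a whole word at a time.
 */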
#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) (__memset((s),(c),(count)))
#define __memset(s, c, count) \
    (__builtin_constant_p(c) ? \
     __constant_c_x_memset((s),(MEMSET_PATTERN_MUL*(unsigned char)(c)),(count)) : \
     __var_x_memset((s),(c),(count)))
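/*
 * Illustrative usage, not part of the original header: zeroing a
 * buffer with a constant byte and a constant length takes the fully
 * constant-folded path. The function name and size are hypothetical.
 */
static inline void clear_buf_example(void *buf)
{
    /* c == 0 and count == 256 are both constants, so this expands to
     * __constant_c_and_count_memset(buf, 0, 256). */
    memset(buf, 0, 256);
}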
#endif /* __X86_STRING_H__ */