ia64/xen-unstable

changeset 5337:7385718ad7c0

bitkeeper revision 1.1683 (42a4622ekYvso9kv8cisS5m2QiUqiQ)

Merge x86/32 and x86/64 string functions.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kaf24@firebug.cl.cam.ac.uk
date Mon Jun 06 14:48:14 2005 +0000 (2005-06-06)
parents 4736c86802ad
children 095b99f7c9c6
files .rootkeys xen/include/asm-x86/page.h xen/include/asm-x86/string.h xen/include/asm-x86/types.h xen/include/asm-x86/x86_32/string.h xen/include/asm-x86/x86_64/string.h
line diff
     1.1 --- a/.rootkeys	Mon Jun 06 10:52:53 2005 +0000
     1.2 +++ b/.rootkeys	Mon Jun 06 14:48:14 2005 +0000
     1.3 @@ -1387,7 +1387,7 @@ 405b8599BsDsDwKEJLS0XipaiQW3TA xen/inclu
     1.4  3ddb79c3Hgbb2g8CyWLMCK-6_ZVQSQ xen/include/asm-x86/smp.h
     1.5  3ddb79c3jn8ALV_S9W5aeTYUQRKBpg xen/include/asm-x86/smpboot.h
     1.6  3ddb79c3NiyQE2vQnyGiaBnNjBO1rA xen/include/asm-x86/spinlock.h
     1.7 -40e1966akOHWvvunCED7x3HPv35QvQ xen/include/asm-x86/string.h
     1.8 +3e7f358aG11EvMI9VJ4_9hD4LUO7rQ xen/include/asm-x86/string.h
     1.9  3ddb79c3ezddh34MdelJpa5tNR00Dw xen/include/asm-x86/system.h
    1.10  42033fc1Bb8ffTshBYFGouGkiAMoUQ xen/include/asm-x86/time.h
    1.11  3ddb79c4HugMq7IYGxcQKFBpKwKhzA xen/include/asm-x86/types.h
    1.12 @@ -1404,13 +1404,11 @@ 429c852fi3pvfa9kIjryYK5AGBmXAg xen/inclu
    1.13  429c852fskvSOgcD5EC25_m9um9t4g xen/include/asm-x86/x86_32/page-3level.h
    1.14  4208e2a3ZNFroNXbX9OYaOB-xtUyDQ xen/include/asm-x86/x86_32/page.h
    1.15  3ddb79c3mbqEM7QQr3zVq7NiBNhouA xen/include/asm-x86/x86_32/regs.h
    1.16 -3e7f358aG11EvMI9VJ4_9hD4LUO7rQ xen/include/asm-x86/x86_32/string.h
    1.17  3ddb79c3M2n1ROZH6xk3HbyN4CPDqg xen/include/asm-x86/x86_32/uaccess.h
    1.18  41bf1717bML6GxpclTWJabiaO5W5vg xen/include/asm-x86/x86_64/asm_defns.h
    1.19  41febc4b1aCGLsm0Y0b_82h7lFtrEA xen/include/asm-x86/x86_64/domain_page.h
    1.20  4208e2a3Fktw4ZttKdDxbhvTQ6brfQ xen/include/asm-x86/x86_64/page.h
    1.21  404f1bb86rAXB3aLS1vYdcqpJiEcyg xen/include/asm-x86/x86_64/regs.h
    1.22 -40e1966azOJZfNI6Ilthe6Q-T3Hewg xen/include/asm-x86/x86_64/string.h
    1.23  404f1bc4tWkB9Qr8RkKtZGW5eMQzhw xen/include/asm-x86/x86_64/uaccess.h
    1.24  422f27c8RHFkePhD34VIEpMMqofZcA xen/include/asm-x86/x86_emulate.h
    1.25  400304fcmRQmDdFYEzDh0wcBba9alg xen/include/public/COPYING
     2.1 --- a/xen/include/asm-x86/page.h	Mon Jun 06 10:52:53 2005 +0000
     2.2 +++ b/xen/include/asm-x86/page.h	Mon Jun 06 14:48:14 2005 +0000
     2.3 @@ -185,22 +185,26 @@ typedef struct { u64 pfn; } pagetable_t;
     2.4  #define pfn_valid(_pfn)     ((_pfn) < max_page)
     2.5  
     2.6  /* High table entries are reserved by the hypervisor. */
     2.7 -/* FIXME: this breaks with PAE -- kraxel */
     2.8 +#if defined(CONFIG_X86_32) && !defined(CONFIG_PAE)
     2.9  #define DOMAIN_ENTRIES_PER_L2_PAGETABLE     \
    2.10    (HYPERVISOR_VIRT_START >> L2_PAGETABLE_SHIFT)
    2.11  #define HYPERVISOR_ENTRIES_PER_L2_PAGETABLE \
    2.12    (L2_PAGETABLE_ENTRIES - DOMAIN_ENTRIES_PER_L2_PAGETABLE)
    2.13 +#else
    2.14 +#define DOMAIN_ENTRIES_PER_L2_PAGETABLE     0
    2.15 +#define HYPERVISOR_ENTRIES_PER_L2_PAGETABLE 0
    2.16 +#endif
    2.17  
    2.18  #define linear_l1_table                                                 \
    2.19      ((l1_pgentry_t *)(LINEAR_PT_VIRT_START))
    2.20 -#define __linear_l2_table                                                 \
    2.21 +#define __linear_l2_table                                               \
    2.22      ((l2_pgentry_t *)(LINEAR_PT_VIRT_START +                            \
    2.23                       (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0))))
    2.24 -#define __linear_l3_table                                                 \
    2.25 +#define __linear_l3_table                                               \
    2.26      ((l3_pgentry_t *)(LINEAR_PT_VIRT_START +                            \
    2.27                       (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0)) +   \
    2.28                       (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<1))))
    2.29 -#define __linear_l4_table                                                 \
    2.30 +#define __linear_l4_table                                               \
    2.31      ((l4_pgentry_t *)(LINEAR_PT_VIRT_START +                            \
    2.32                       (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0)) +   \
    2.33                       (LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<1)) +   \
     3.1 --- a/xen/include/asm-x86/string.h	Mon Jun 06 10:52:53 2005 +0000
     3.2 +++ b/xen/include/asm-x86/string.h	Mon Jun 06 14:48:14 2005 +0000
     3.3 @@ -1,5 +1,445 @@
     3.4 -#ifdef __x86_64__
     3.5 -#include <asm/x86_64/string.h>
     3.6 +#ifndef __X86_STRING_H__
     3.7 +#define __X86_STRING_H__
     3.8 +
     3.9 +#include <xen/config.h>
    3.10 +
    3.11 +#define __HAVE_ARCH_STRCPY
    3.12 +static inline char *strcpy(char *dest, const char *src)
    3.13 +{
    3.14 +    long d0, d1, d2;
    3.15 +    __asm__ __volatile__ (
    3.16 +        "1: lodsb          \n"
    3.17 +        "   stosb          \n"
    3.18 +        "   test %%al,%%al \n"
    3.19 +        "   jne  1b        \n"
    3.20 +        : "=&S" (d0), "=&D" (d1), "=&a" (d2)
    3.21 +        : "0" (src), "1" (dest) : "memory" );
    3.22 +    return dest;
    3.23 +}
    3.24 +
    3.25 +#define __HAVE_ARCH_STRNCPY
    3.26 +static inline char *strncpy(char *dest, const char *src, size_t count)
    3.27 +{
    3.28 +    long d0, d1, d2, d3;
    3.29 +    __asm__ __volatile__ (
    3.30 +        "1: dec  %2        \n"
    3.31 +        "   js   2f        \n"
    3.32 +        "   lodsb          \n"
    3.33 +        "   stosb          \n"
    3.34 +        "   test %%al,%%al \n"
    3.35 +        "   jne  1b        \n"
    3.36 +        "   rep ; stosb    \n"
    3.37 +        "2:                \n"
    3.38 +        : "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
    3.39 +        : "0" (src), "1" (dest), "2" (count) : "memory" );
    3.40 +    return dest;
    3.41 +}
    3.42 +
    3.43 +#define __HAVE_ARCH_STRCAT
    3.44 +static inline char *strcat(char *dest, const char *src)
    3.45 +{
    3.46 +    long d0, d1, d2, d3;
    3.47 +    __asm__ __volatile__ (
    3.48 +        "   repne ; scasb  \n"
    3.49 +        "   dec  %1        \n"
    3.50 +        "1: lodsb          \n"
    3.51 +        "   stosb          \n"
    3.52 +        "   test %%al,%%al \n"
    3.53 +        "   jne  1b        \n"
    3.54 +        : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
    3.55 +        : "0" (src), "1" (dest), "2" (0UL), "3" (0xffffffffUL) : "memory" );
    3.56 +    return dest;
    3.57 +}
    3.58 +
    3.59 +#define __HAVE_ARCH_STRNCAT
    3.60 +static inline char *strncat(char *dest, const char *src, size_t count)
    3.61 +{
    3.62 +    long d0, d1, d2, d3;
    3.63 +    __asm__ __volatile__ (
    3.64 +        "   repne ; scasb   \n"
    3.65 +        "   dec  %1         \n"
    3.66 +        "   mov  %8,%3      \n"
    3.67 +        "1: dec  %3         \n"
    3.68 +        "   js   2f         \n"
    3.69 +        "   lodsb           \n"
    3.70 +        "   stosb           \n"
    3.71 +        "   test %%al,%%al  \n"
    3.72 +        "   jne  1b         \n"
    3.73 +        "2: xor  %%eax,%%eax\n"
    3.74 +        "   stosb"
    3.75 +        : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
    3.76 +        : "0" (src), "1" (dest), "2" (0UL), "3" (0xffffffffUL), "g" (count)
    3.77 +        : "memory" );
    3.78 +    return dest;
    3.79 +}
    3.80 +
    3.81 +#define __HAVE_ARCH_STRCMP
    3.82 +static inline int strcmp(const char *cs, const char *ct)
    3.83 +{
    3.84 +    long d0, d1;
    3.85 +    register int __res;
    3.86 +    __asm__ __volatile__ (
    3.87 +        "1: lodsb           \n"
    3.88 +        "   scasb           \n"
    3.89 +        "   jne  2f         \n"
    3.90 +        "   test %%al,%%al  \n"
    3.91 +        "   jne  1b         \n"
    3.92 +        "   xor  %%eax,%%eax\n"
    3.93 +        "   jmp  3f         \n"
    3.94 +        "2: sbb  %%eax,%%eax\n"
    3.95 +        "   or   $1,%%al    \n"
    3.96 +        "3:                 \n"
    3.97 +        : "=a" (__res), "=&S" (d0), "=&D" (d1)
    3.98 +        : "1" (cs), "2" (ct) );
    3.99 +    return __res;
   3.100 +}
   3.101 +
   3.102 +#define __HAVE_ARCH_STRNCMP
   3.103 +static inline int strncmp(const char *cs, const char *ct, size_t count)
   3.104 +{
   3.105 +    long d0, d1, d2;
   3.106 +    register int __res;
   3.107 +    __asm__ __volatile__ (
   3.108 +        "1: dec  %3         \n"
   3.109 +        "   js   2f         \n"
   3.110 +        "   lodsb           \n"
   3.111 +        "   scasb           \n"
   3.112 +        "   jne  3f         \n"
   3.113 +        "   test %%al,%%al  \n"
   3.114 +        "   jne  1b         \n"
   3.115 +        "2: xor  %%eax,%%eax\n"
   3.116 +        "   jmp  4f         \n"
   3.117 +        "3: sbb  %%eax,%%eax\n"
   3.118 +        "   or   $1,%%al    \n"
   3.119 +        "4:                 \n"
   3.120 +        : "=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
   3.121 +        : "1" (cs), "2" (ct), "3" (count) );
   3.122 +    return __res;
   3.123 +}
   3.124 +
   3.125 +#define __HAVE_ARCH_STRCHR
   3.126 +static inline char *strchr(const char *s, int c)
   3.127 +{
   3.128 +    long d0;
   3.129 +    register char *__res;
   3.130 +    __asm__ __volatile__ (
   3.131 +        "   mov  %%al,%%ah  \n"
   3.132 +        "1: lodsb           \n"
   3.133 +        "   cmp  %%ah,%%al  \n"
   3.134 +        "   je   2f         \n"
   3.135 +        "   test %%al,%%al  \n"
   3.136 +        "   jne  1b         \n"
   3.137 +        "   mov  $1,%1      \n"
   3.138 +        "2: mov  %1,%0      \n"
   3.139 +        "   dec  %0         \n"
   3.140 +        : "=a" (__res), "=&S" (d0) : "1" (s), "0" (c) );
   3.141 +    return __res;
   3.142 +}
   3.143 +
   3.144 +#define __HAVE_ARCH_STRRCHR
   3.145 +static inline char *strrchr(const char *s, int c)
   3.146 +{
   3.147 +    long d0, d1;
   3.148 +    register char *__res;
   3.149 +    __asm__ __volatile__ (
   3.150 +        "   mov  %%al,%%ah  \n"
   3.151 +        "1: lodsb           \n"
   3.152 +        "   cmp  %%ah,%%al  \n"
   3.153 +        "   jne  2f         \n"
   3.154 +        "   lea  -1(%1),%0  \n"
   3.155 +        "2: test %%al,%%al  \n"
   3.156 +        "   jne  1b         \n"
   3.157 +        : "=g" (__res), "=&S" (d0), "=&a" (d1) : "0" (0), "1" (s), "2" (c) );
   3.158 +    return __res;
   3.159 +}
   3.160 +
   3.161 +#define __HAVE_ARCH_STRLEN
   3.162 +static inline size_t strlen(const char *s)
   3.163 +{
   3.164 +    long d0;
   3.165 +    register int __res;
   3.166 +    __asm__ __volatile__ (
   3.167 +        "   repne ; scasb  \n"
   3.168 +        "   notl %0        \n"
   3.169 +        "   decl %0        \n"
   3.170 +        : "=c" (__res), "=&D" (d0) : "1" (s), "a" (0), "0" (0xffffffffUL) );
   3.171 +    return __res;
   3.172 +}
   3.173 +
   3.174 +static inline void *__variable_memcpy(void *to, const void *from, size_t n)
   3.175 +{
   3.176 +    long d0, d1, d2;
   3.177 +    __asm__ __volatile__ (
   3.178 +        "   rep ; movs"__OS"\n"
   3.179 +        "   mov %4,%3       \n"
   3.180 +        "   rep ; movsb     \n"
   3.181 +        : "=&c" (d0), "=&D" (d1), "=&S" (d2)
   3.182 +        : "0" (n/BYTES_PER_LONG), "r" (n%BYTES_PER_LONG), "1" (to), "2" (from)
   3.183 +        : "memory" );
   3.184 +    return to;
   3.185 +}
   3.186 +
   3.187 +/*
   3.188 + * This looks horribly ugly, but the compiler can optimize it totally,
   3.189 + * as the count is constant.
   3.190 + */
   3.191 +static always_inline void * __constant_memcpy(
   3.192 +    void * to, const void * from, size_t n)
   3.193 +{
   3.194 +    switch ( n )
   3.195 +    {
   3.196 +    case 0:
   3.197 +        return to;
   3.198 +    case 1:
   3.199 +        *(u8 *)to = *(const u8 *)from;
   3.200 +        return to;
   3.201 +    case 2:
   3.202 +        *(u16 *)to = *(const u16 *)from;
   3.203 +        return to;
   3.204 +    case 3:
   3.205 +        *(u16 *)to = *(const u16 *)from;
   3.206 +        *(2+(u8 *)to) = *(2+(const u8 *)from);
   3.207 +        return to;
   3.208 +    case 4:
   3.209 +        *(u32 *)to = *(const u32 *)from;
   3.210 +        return to;
   3.211 +    case 5:
   3.212 +        *(u32 *)to = *(const u32 *)from;
   3.213 +        *(4+(u8 *)to) = *(4+(const u8 *)from);
   3.214 +        return to;
   3.215 +    case 6:
   3.216 +        *(u32 *)to = *(const u32 *)from;
   3.217 +        *(2+(u16 *)to) = *(2+(const u16 *)from);
   3.218 +        return to;
   3.219 +    case 7:
   3.220 +        *(u32 *)to = *(const u32 *)from;
   3.221 +        *(2+(u16 *)to) = *(2+(const u16 *)from);
   3.222 +        *(6+(u8 *)to) = *(6+(const u8 *)from);
   3.223 +        return to;
   3.224 +    case 8:
   3.225 +        *(u64 *)to = *(const u64 *)from;
   3.226 +        return to;
   3.227 +    case 12:
   3.228 +        *(u64 *)to = *(const u64 *)from;
   3.229 +        *(2+(u32 *)to) = *(2+(const u32 *)from);
   3.230 +        return to;
   3.231 +    case 16:
   3.232 +        *(u64 *)to = *(const u64 *)from;
   3.233 +        *(1+(u64 *)to) = *(1+(const u64 *)from);
   3.234 +        return to;
   3.235 +    case 20:
   3.236 +        *(u64 *)to = *(const u64 *)from;
   3.237 +        *(1+(u64 *)to) = *(1+(const u64 *)from);
   3.238 +        *(4+(u32 *)to) = *(4+(const u32 *)from);
   3.239 +        return to;
   3.240 +    }
   3.241 +#define COMMON(x)                                       \
   3.242 +    __asm__ __volatile__ (                              \
   3.243 +        "rep ; movs"__OS                                \
   3.244 +        x                                               \
   3.245 +        : "=&c" (d0), "=&D" (d1), "=&S" (d2)            \
   3.246 +        : "0" (n/BYTES_PER_LONG), "1" (to), "2" (from)  \
   3.247 +        : "memory" );
   3.248 +    {
   3.249 +        long d0, d1, d2;
   3.250 +        switch ( n % BYTES_PER_LONG )
   3.251 +        {
   3.252 +        case 0: COMMON(""); return to;
   3.253 +        case 1: COMMON("\n\tmovsb"); return to;
   3.254 +        case 2: COMMON("\n\tmovsw"); return to;
   3.255 +        case 3: COMMON("\n\tmovsw\n\tmovsb"); return to;
   3.256 +        case 4: COMMON("\n\tmovsl"); return to;
   3.257 +        case 5: COMMON("\n\tmovsl\n\tmovsb"); return to;
   3.258 +        case 6: COMMON("\n\tmovsl\n\tmovsw"); return to;
   3.259 +        case 7: COMMON("\n\tmovsl\n\tmovsw\n\tmovsb"); return to;
   3.260 +        }
   3.261 +    }
   3.262 +#undef COMMON
   3.263 +}
   3.264 +
   3.265 +#define __HAVE_ARCH_MEMCPY
   3.266 +#define memcpy(t,f,n) (__memcpy((t),(f),(n)))
   3.267 +static always_inline
   3.268 +void *__memcpy(void *t, const void *f, size_t n)
   3.269 +{
   3.270 +    return (__builtin_constant_p(n) ?
   3.271 +            __constant_memcpy((t),(f),(n)) :
   3.272 +            __variable_memcpy((t),(f),(n)));
   3.273 +}
   3.274 +
    3.275 +/* Some versions of gcc don't have this builtin. It's non-critical anyway. */
   3.276 +#define __HAVE_ARCH_MEMMOVE
   3.277 +extern void *memmove(void *dest, const void *src, size_t n);
   3.278 +
   3.279 +#define __HAVE_ARCH_MEMCMP
   3.280 +#define memcmp __builtin_memcmp
   3.281 +
   3.282 +#define __HAVE_ARCH_MEMCHR
   3.283 +static inline void *memchr(const void *cs, int c, size_t count)
   3.284 +{
   3.285 +    long d0;
   3.286 +    register void *__res;
   3.287 +    if ( count == 0 )
   3.288 +        return NULL;
   3.289 +    __asm__ __volatile__ (
   3.290 +        "   repne ; scasb\n"
   3.291 +        "   je   1f      \n"
   3.292 +        "   mov  $1,%0   \n"
   3.293 +        "1: dec  %0      \n"
   3.294 +        : "=D" (__res), "=&c" (d0) : "a" (c), "0" (cs), "1" (count) );
   3.295 +    return __res;
   3.296 +}
   3.297 +
   3.298 +static inline void *__memset_generic(void *s, char c, size_t count)
   3.299 +{
   3.300 +    long d0, d1;
   3.301 +    __asm__ __volatile__ (
   3.302 +        "rep ; stosb"
   3.303 +        : "=&c" (d0), "=&D" (d1) : "a" (c), "1" (s), "0" (count) : "memory" );
   3.304 +    return s;
   3.305 +}
   3.306 +
   3.307 +/* we might want to write optimized versions of these later */
   3.308 +#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
   3.309 +
   3.310 +/*
   3.311 + * memset(x,0,y) is a reasonably common thing to do, so we want to fill
   3.312 + * things 32 bits at a time even when we don't know the size of the
   3.313 + * area at compile-time..
   3.314 + */
   3.315 +static inline void *__constant_c_memset(void *s, unsigned long c, size_t count)
   3.316 +{
   3.317 +    long d0, d1;
   3.318 +    __asm__ __volatile__(
   3.319 +        "   rep ; stos"__OS"\n"
   3.320 +        "   mov  %3,%4      \n"
   3.321 +        "   rep ; stosb     \n"
   3.322 +        : "=&c" (d0), "=&D" (d1)
   3.323 +        : "a" (c), "r" (count%BYTES_PER_LONG),
   3.324 +          "0" (count/BYTES_PER_LONG), "1" (s)
   3.325 +        : "memory" );
   3.326 +    return s; 
   3.327 +}
   3.328 +
   3.329 +#define __HAVE_ARCH_STRNLEN
   3.330 +static inline size_t strnlen(const char *s, size_t count)
   3.331 +{
   3.332 +    long d0;
   3.333 +    register int __res;
   3.334 +    __asm__ __volatile__ (
   3.335 +        "   jmp  2f       \n"
   3.336 +        "1: cmpb $0,(%3)  \n"
   3.337 +        "   je   3f       \n"
   3.338 +        "   inc  %3       \n"
   3.339 +        "2: dec  %1       \n"
   3.340 +        "   jns  1b       \n"
   3.341 +        "3: subl %2,%0    \n"
   3.342 +        : "=a" (__res), "=&d" (d0)
   3.343 +        : "c" ((int)(long)s), "0" (s), "1" (count) );
   3.344 +    return __res;
   3.345 +}
   3.346 +
   3.347 +/*
   3.348 + * This looks horribly ugly, but the compiler can optimize it totally,
    3.349 + * as by now we know that both pattern and count are constant.
   3.350 + */
   3.351 +static always_inline void *__constant_c_and_count_memset(
   3.352 +    void *s, unsigned long pattern, size_t count)
   3.353 +{
   3.354 +    switch ( count )
   3.355 +    {
   3.356 +    case 0:
   3.357 +        return s;
   3.358 +    case 1:
   3.359 +        *(u8 *)s = pattern;
   3.360 +        return s;
   3.361 +    case 2:
   3.362 +        *(u16 *)s = pattern;
   3.363 +        return s;
   3.364 +    case 3:
   3.365 +        *(u16 *)s = pattern;
   3.366 +        *(2+(u8 *)s) = pattern;
   3.367 +        return s;
   3.368 +    case 4:
   3.369 +        *(u32 *)s = pattern;
   3.370 +        return s;
   3.371 +    case 5:
   3.372 +        *(u32 *)s = pattern;
   3.373 +        *(4+(u8 *)s) = pattern;
   3.374 +        return s;
   3.375 +    case 6:
   3.376 +        *(u32 *)s = pattern;
   3.377 +        *(2+(u16 *)s) = pattern;
   3.378 +        return s;
   3.379 +    case 7:
   3.380 +        *(u32 *)s = pattern;
   3.381 +        *(2+(u16 *)s) = pattern;
   3.382 +        *(6+(u8 *)s) = pattern;
   3.383 +        return s;
   3.384 +    case 8:
   3.385 +        *(u64 *)s = pattern;
   3.386 +        return s;
   3.387 +    }
   3.388 +#define COMMON(x)                                               \
   3.389 +    __asm__  __volatile__ (                                     \
   3.390 +        "rep ; stos"__OS                                        \
   3.391 +        x                                                       \
   3.392 +        : "=&c" (d0), "=&D" (d1)                                \
   3.393 +        : "a" (pattern), "0" (count/BYTES_PER_LONG), "1" (s)    \
   3.394 +        : "memory" )
   3.395 +    {
   3.396 +        long d0, d1;
   3.397 +        switch ( count % BYTES_PER_LONG )
   3.398 +        {
   3.399 +        case 0: COMMON(""); return s;
   3.400 +        case 1: COMMON("\n\tstosb"); return s;
   3.401 +        case 2: COMMON("\n\tstosw"); return s;
   3.402 +        case 3: COMMON("\n\tstosw\n\tstosb"); return s;
   3.403 +        case 4: COMMON("\n\tstosl"); return s;
   3.404 +        case 5: COMMON("\n\tstosl\n\tstosb"); return s;
   3.405 +        case 6: COMMON("\n\tstosl\n\tstosw"); return s;
   3.406 +        case 7: COMMON("\n\tstosl\n\tstosw\n\tstosb"); return s;
   3.407 +        }
   3.408 +    }
   3.409 +#undef COMMON
   3.410 +}
   3.411 +
   3.412 +#define __constant_c_x_memset(s, c, count) \
   3.413 +(__builtin_constant_p(count) ? \
   3.414 + __constant_c_and_count_memset((s),(c),(count)) : \
   3.415 + __constant_c_memset((s),(c),(count)))
   3.416 +
   3.417 +#define __var_x_memset(s, c, count) \
   3.418 +(__builtin_constant_p(count) ? \
   3.419 + __constant_count_memset((s),(c),(count)) : \
   3.420 + __memset_generic((s),(c),(count)))
   3.421 +
   3.422 +#ifdef CONFIG_X86_64
   3.423 +#define MEMSET_PATTERN_MUL 0x0101010101010101UL
   3.424  #else
   3.425 -#include <asm/x86_32/string.h>
   3.426 +#define MEMSET_PATTERN_MUL 0x01010101UL
   3.427  #endif
   3.428 +
   3.429 +#define __HAVE_ARCH_MEMSET
   3.430 +#define memset(s, c, count) (__memset((s),(c),(count)))
   3.431 +#define __memset(s, c, count) \
   3.432 +(__builtin_constant_p(c) ? \
   3.433 + __constant_c_x_memset((s),(MEMSET_PATTERN_MUL*(unsigned char)(c)),(count)) : \
   3.434 + __var_x_memset((s),(c),(count)))
   3.435 +
   3.436 +#define __HAVE_ARCH_MEMSCAN
   3.437 +static inline void *memscan(void *addr, int c, size_t size)
   3.438 +{
   3.439 +    if ( size == 0 )
   3.440 +        return addr;
   3.441 +    __asm__ (
   3.442 +        "   repnz; scasb \n"
   3.443 +        "   jnz  1f      \n"
   3.444 +        "   dec  %0      \n"
   3.445 +        "1:              \n"
   3.446 +        : "=D" (addr), "=c" (size)
   3.447 +        : "0" (addr), "1" (size), "a" (c) );
   3.448 +    return addr;
   3.449 +}
   3.450 +
   3.451 +#endif /* __X86_STRING_H__ */
     4.1 --- a/xen/include/asm-x86/types.h	Mon Jun 06 10:52:53 2005 +0000
     4.2 +++ b/xen/include/asm-x86/types.h	Mon Jun 06 14:48:14 2005 +0000
     4.3 @@ -1,8 +1,5 @@
     4.4 -#ifndef _X86_TYPES_H
     4.5 -#define _X86_TYPES_H
     4.6 -
     4.7 -typedef unsigned short umode_t;
     4.8 -
     4.9 +#ifndef __X86_TYPES_H__
    4.10 +#define __X86_TYPES_H__
    4.11  
    4.12  /*
    4.13   * __xx is ok: it doesn't pollute the POSIX namespace. Use these in the
    4.14 @@ -45,7 +42,6 @@ typedef unsigned long long u64;
    4.15  #define BITS_PER_LONG 32
    4.16  #define BYTES_PER_LONG 4
    4.17  #define LONG_BYTEORDER 2
    4.18 -typedef unsigned int size_t;
    4.19  #if defined(CONFIG_X86_PAE)
    4.20  typedef u64 physaddr_t;
    4.21  #else
    4.22 @@ -57,15 +53,9 @@ typedef unsigned long u64;
    4.23  #define BITS_PER_LONG 64
    4.24  #define BYTES_PER_LONG 8
    4.25  #define LONG_BYTEORDER 3
    4.26 -typedef unsigned long size_t;
    4.27  typedef u64 physaddr_t;
    4.28  #endif
    4.29  
    4.30 -/* DMA addresses come in generic and 64-bit flavours.  */
    4.31 +typedef unsigned long size_t;
    4.32  
    4.33 -typedef unsigned long dma_addr_t;
    4.34 -typedef u64 dma64_addr_t;
    4.35 -
    4.36 -typedef unsigned short xmem_bufctl_t;
    4.37 -
    4.38 -#endif
    4.39 +#endif /* __X86_TYPES_H__ */
     5.1 --- a/xen/include/asm-x86/x86_32/string.h	Mon Jun 06 10:52:53 2005 +0000
     5.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     5.3 @@ -1,489 +0,0 @@
     5.4 -#ifndef _I386_STRING_H_
     5.5 -#define _I386_STRING_H_
     5.6 -
     5.7 -#include <xen/config.h>
     5.8 -
     5.9 -/*
    5.10 - * This string-include defines all string functions as inline
    5.11 - * functions. Use gcc. It also assumes ds=es=data space, this should be
    5.12 - * normal. Most of the string-functions are rather heavily hand-optimized,
    5.13 - * see especially strtok,strstr,str[c]spn. They should work, but are not
    5.14 - * very easy to understand. Everything is done entirely within the register
    5.15 - * set, making the functions fast and clean. String instructions have been
    5.16 - * used through-out, making for "slightly" unclear code :-)
    5.17 - *
    5.18 - *		NO Copyright (C) 1991, 1992 Linus Torvalds,
    5.19 - *		consider these trivial functions to be PD.
    5.20 - */
    5.21 -
    5.22 -
    5.23 -#define __HAVE_ARCH_STRCPY
    5.24 -static inline char * strcpy(char * dest,const char *src)
    5.25 -{
    5.26 -int d0, d1, d2;
    5.27 -__asm__ __volatile__(
    5.28 -	"1:\tlodsb\n\t"
    5.29 -	"stosb\n\t"
    5.30 -	"testb %%al,%%al\n\t"
    5.31 -	"jne 1b"
    5.32 -	: "=&S" (d0), "=&D" (d1), "=&a" (d2)
    5.33 -	:"0" (src),"1" (dest) : "memory");
    5.34 -return dest;
    5.35 -}
    5.36 -
    5.37 -#define __HAVE_ARCH_STRNCPY
    5.38 -static inline char * strncpy(char * dest,const char *src,size_t count)
    5.39 -{
    5.40 -int d0, d1, d2, d3;
    5.41 -__asm__ __volatile__(
    5.42 -	"1:\tdecl %2\n\t"
    5.43 -	"js 2f\n\t"
    5.44 -	"lodsb\n\t"
    5.45 -	"stosb\n\t"
    5.46 -	"testb %%al,%%al\n\t"
    5.47 -	"jne 1b\n\t"
    5.48 -	"rep\n\t"
    5.49 -	"stosb\n"
    5.50 -	"2:"
    5.51 -	: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
    5.52 -	:"0" (src),"1" (dest),"2" (count) : "memory");
    5.53 -return dest;
    5.54 -}
    5.55 -
    5.56 -#define __HAVE_ARCH_STRCAT
    5.57 -static inline char * strcat(char * dest,const char * src)
    5.58 -{
    5.59 -int d0, d1, d2, d3;
    5.60 -__asm__ __volatile__(
    5.61 -	"repne\n\t"
    5.62 -	"scasb\n\t"
    5.63 -	"decl %1\n"
    5.64 -	"1:\tlodsb\n\t"
    5.65 -	"stosb\n\t"
    5.66 -	"testb %%al,%%al\n\t"
    5.67 -	"jne 1b"
    5.68 -	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
    5.69 -	: "0" (src), "1" (dest), "2" (0), "3" (0xffffffff):"memory");
    5.70 -return dest;
    5.71 -}
    5.72 -
    5.73 -#define __HAVE_ARCH_STRNCAT
    5.74 -static inline char * strncat(char * dest,const char * src,size_t count)
    5.75 -{
    5.76 -int d0, d1, d2, d3;
    5.77 -__asm__ __volatile__(
    5.78 -	"repne\n\t"
    5.79 -	"scasb\n\t"
    5.80 -	"decl %1\n\t"
    5.81 -	"movl %8,%3\n"
    5.82 -	"1:\tdecl %3\n\t"
    5.83 -	"js 2f\n\t"
    5.84 -	"lodsb\n\t"
    5.85 -	"stosb\n\t"
    5.86 -	"testb %%al,%%al\n\t"
    5.87 -	"jne 1b\n"
    5.88 -	"2:\txorl %2,%2\n\t"
    5.89 -	"stosb"
    5.90 -	: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
    5.91 -	: "0" (src),"1" (dest),"2" (0),"3" (0xffffffff), "g" (count)
    5.92 -	: "memory");
    5.93 -return dest;
    5.94 -}
    5.95 -
    5.96 -#define __HAVE_ARCH_STRCMP
    5.97 -static inline int strcmp(const char * cs,const char * ct)
    5.98 -{
    5.99 -int d0, d1;
   5.100 -register int __res;
   5.101 -__asm__ __volatile__(
   5.102 -	"1:\tlodsb\n\t"
   5.103 -	"scasb\n\t"
   5.104 -	"jne 2f\n\t"
   5.105 -	"testb %%al,%%al\n\t"
   5.106 -	"jne 1b\n\t"
   5.107 -	"xorl %%eax,%%eax\n\t"
   5.108 -	"jmp 3f\n"
   5.109 -	"2:\tsbbl %%eax,%%eax\n\t"
   5.110 -	"orb $1,%%al\n"
   5.111 -	"3:"
   5.112 -	:"=a" (__res), "=&S" (d0), "=&D" (d1)
   5.113 -		     :"1" (cs),"2" (ct));
   5.114 -return __res;
   5.115 -}
   5.116 -
   5.117 -#define __HAVE_ARCH_STRNCMP
   5.118 -static inline int strncmp(const char * cs,const char * ct,size_t count)
   5.119 -{
   5.120 -register int __res;
   5.121 -int d0, d1, d2;
   5.122 -__asm__ __volatile__(
   5.123 -	"1:\tdecl %3\n\t"
   5.124 -	"js 2f\n\t"
   5.125 -	"lodsb\n\t"
   5.126 -	"scasb\n\t"
   5.127 -	"jne 3f\n\t"
   5.128 -	"testb %%al,%%al\n\t"
   5.129 -	"jne 1b\n"
   5.130 -	"2:\txorl %%eax,%%eax\n\t"
   5.131 -	"jmp 4f\n"
   5.132 -	"3:\tsbbl %%eax,%%eax\n\t"
   5.133 -	"orb $1,%%al\n"
   5.134 -	"4:"
   5.135 -		     :"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
   5.136 -		     :"1" (cs),"2" (ct),"3" (count));
   5.137 -return __res;
   5.138 -}
   5.139 -
   5.140 -#define __HAVE_ARCH_STRCHR
   5.141 -static inline char * strchr(const char * s, int c)
   5.142 -{
   5.143 -int d0;
   5.144 -register char * __res;
   5.145 -__asm__ __volatile__(
   5.146 -	"movb %%al,%%ah\n"
   5.147 -	"1:\tlodsb\n\t"
   5.148 -	"cmpb %%ah,%%al\n\t"
   5.149 -	"je 2f\n\t"
   5.150 -	"testb %%al,%%al\n\t"
   5.151 -	"jne 1b\n\t"
   5.152 -	"movl $1,%1\n"
   5.153 -	"2:\tmovl %1,%0\n\t"
   5.154 -	"decl %0"
   5.155 -	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
   5.156 -return __res;
   5.157 -}
   5.158 -
   5.159 -#define __HAVE_ARCH_STRRCHR
   5.160 -static inline char * strrchr(const char * s, int c)
   5.161 -{
   5.162 -int d0, d1;
   5.163 -register char * __res;
   5.164 -__asm__ __volatile__(
   5.165 -	"movb %%al,%%ah\n"
   5.166 -	"1:\tlodsb\n\t"
   5.167 -	"cmpb %%ah,%%al\n\t"
   5.168 -	"jne 2f\n\t"
   5.169 -	"leal -1(%%esi),%0\n"
   5.170 -	"2:\ttestb %%al,%%al\n\t"
   5.171 -	"jne 1b"
   5.172 -	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
   5.173 -return __res;
   5.174 -}
   5.175 -
   5.176 -#define __HAVE_ARCH_STRLEN
   5.177 -static inline size_t strlen(const char * s)
   5.178 -{
   5.179 -int d0;
   5.180 -register int __res;
   5.181 -__asm__ __volatile__(
   5.182 -	"repne\n\t"
   5.183 -	"scasb\n\t"
   5.184 -	"notl %0\n\t"
   5.185 -	"decl %0"
   5.186 -	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffff));
   5.187 -return __res;
   5.188 -}
   5.189 -
   5.190 -static inline void * __variable_memcpy(void * to, const void * from, size_t n)
   5.191 -{
   5.192 -int d0, d1, d2;
   5.193 -__asm__ __volatile__(
   5.194 -	"rep ; movsl\n\t"
   5.195 -	"testb $2,%b4\n\t"
   5.196 -	"je 1f\n\t"
   5.197 -	"movsw\n"
   5.198 -	"1:\ttestb $1,%b4\n\t"
   5.199 -	"je 2f\n\t"
   5.200 -	"movsb\n"
   5.201 -	"2:"
   5.202 -	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
   5.203 -	:"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
   5.204 -	: "memory");
   5.205 -return (to);
   5.206 -}
   5.207 -
   5.208 -/*
   5.209 - * This looks horribly ugly, but the compiler can optimize it totally,
   5.210 - * as the count is constant.
   5.211 - */
   5.212 -static always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
   5.213 -{
   5.214 -	switch (n) {
   5.215 -		case 0:
   5.216 -			return to;
   5.217 -		case 1:
   5.218 -			*(unsigned char *)to = *(const unsigned char *)from;
   5.219 -			return to;
   5.220 -		case 2:
   5.221 -			*(unsigned short *)to = *(const unsigned short *)from;
   5.222 -			return to;
   5.223 -		case 3:
   5.224 -			*(unsigned short *)to = *(const unsigned short *)from;
   5.225 -			*(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
   5.226 -			return to;
   5.227 -		case 4:
   5.228 -			*(unsigned long *)to = *(const unsigned long *)from;
   5.229 -			return to;
   5.230 -		case 6:	/* for Ethernet addresses */
   5.231 -			*(unsigned long *)to = *(const unsigned long *)from;
   5.232 -			*(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
   5.233 -			return to;
   5.234 -		case 8:
   5.235 -			*(unsigned long *)to = *(const unsigned long *)from;
   5.236 -			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
   5.237 -			return to;
   5.238 -		case 12:
   5.239 -			*(unsigned long *)to = *(const unsigned long *)from;
   5.240 -			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
   5.241 -			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
   5.242 -			return to;
   5.243 -		case 16:
   5.244 -			*(unsigned long *)to = *(const unsigned long *)from;
   5.245 -			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
   5.246 -			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
   5.247 -			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
   5.248 -			return to;
   5.249 -		case 20:
   5.250 -			*(unsigned long *)to = *(const unsigned long *)from;
   5.251 -			*(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
   5.252 -			*(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
   5.253 -			*(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
   5.254 -			*(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
   5.255 -			return to;
   5.256 -	}
   5.257 -#define COMMON(x) \
   5.258 -__asm__ __volatile__( \
   5.259 -	"rep ; movsl" \
   5.260 -	x \
   5.261 -	: "=&c" (d0), "=&D" (d1), "=&S" (d2) \
   5.262 -	: "0" (n/4),"1" ((long) to),"2" ((long) from) \
   5.263 -	: "memory");
   5.264 -{
   5.265 -	int d0, d1, d2;
   5.266 -	switch (n % 4) {
   5.267 -		case 0: COMMON(""); return to;
   5.268 -		case 1: COMMON("\n\tmovsb"); return to;
   5.269 -		case 2: COMMON("\n\tmovsw"); return to;
   5.270 -		default: COMMON("\n\tmovsw\n\tmovsb"); return to;
   5.271 -	}
   5.272 -}
   5.273 -  
   5.274 -#undef COMMON
   5.275 -}
   5.276 -
   5.277 -#define __HAVE_ARCH_MEMCPY
   5.278 -#define memcpy(t,f,n) (__memcpy((t),(f),(n)))
   5.279 -static always_inline
   5.280 -void *__memcpy(void *t, const void *f, size_t n)
   5.281 -{
   5.282 -	return (__builtin_constant_p(n) ?
   5.283 -	 __constant_memcpy((t),(f),(n)) :
   5.284 -	 __variable_memcpy((t),(f),(n)));
   5.285 -}
   5.286 -
   5.287 -/*
   5.288 - * struct_cpy(x,y), copy structure *x into (matching structure) *y.
   5.289 - *
   5.290 - * We get link-time errors if the structure sizes do not match.
   5.291 - * There is no runtime overhead, it's all optimized away at
   5.292 - * compile time.
   5.293 - */
   5.294 -//extern void __struct_cpy_bug (void);
   5.295 -
   5.296 -/*
   5.297 -#define struct_cpy(x,y) 			\
   5.298 -({						\
   5.299 -	if (sizeof(*(x)) != sizeof(*(y))) 	\
   5.300 -		__struct_cpy_bug;		\
   5.301 -	memcpy(x, y, sizeof(*(x)));		\
   5.302 -})
   5.303 -*/
   5.304 -
   5.305 -#define __HAVE_ARCH_MEMMOVE
   5.306 -#define memmove(dest,src,n) (__memmove((dest),(src),(n)))
   5.307 -static inline void *__memmove(void * dest,const void * src, size_t n)
   5.308 -{
   5.309 -int d0, d1, d2;
   5.310 -if (dest<src)
   5.311 -__asm__ __volatile__(
   5.312 -	"rep\n\t"
   5.313 -	"movsb"
   5.314 -	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
   5.315 -	:"0" (n),"1" (src),"2" (dest)
   5.316 -	: "memory");
   5.317 -else
   5.318 -__asm__ __volatile__(
   5.319 -	"std\n\t"
   5.320 -	"rep\n\t"
   5.321 -	"movsb\n\t"
   5.322 -	"cld"
   5.323 -	: "=&c" (d0), "=&S" (d1), "=&D" (d2)
   5.324 -	:"0" (n),
   5.325 -	 "1" (n-1+(const char *)src),
   5.326 -	 "2" (n-1+(char *)dest)
   5.327 -	:"memory");
   5.328 -return dest;
   5.329 -}
   5.330 -
   5.331 -#define __HAVE_ARCH_MEMCMP
   5.332 -#define memcmp __builtin_memcmp
   5.333 -
   5.334 -#define __HAVE_ARCH_MEMCHR
   5.335 -static inline void * memchr(const void * cs,int c,size_t count)
   5.336 -{
   5.337 -int d0;
   5.338 -register void * __res;
   5.339 -if (!count)
   5.340 -	return NULL;
   5.341 -__asm__ __volatile__(
   5.342 -	"repne\n\t"
   5.343 -	"scasb\n\t"
   5.344 -	"je 1f\n\t"
   5.345 -	"movl $1,%0\n"
   5.346 -	"1:\tdecl %0"
   5.347 -	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
   5.348 -return __res;
   5.349 -}
   5.350 -
   5.351 -static inline void * __memset_generic(void * s, char c,size_t count)
   5.352 -{
   5.353 -int d0, d1;
   5.354 -__asm__ __volatile__(
   5.355 -	"rep\n\t"
   5.356 -	"stosb"
   5.357 -	: "=&c" (d0), "=&D" (d1)
   5.358 -	:"a" (c),"1" (s),"0" (count)
   5.359 -	:"memory");
   5.360 -return s;
   5.361 -}
   5.362 -
   5.363 -/* we might want to write optimized versions of these later */
   5.364 -#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
   5.365 -
   5.366 -/*
   5.367 - * memset(x,0,y) is a reasonably common thing to do, so we want to fill
   5.368 - * things 32 bits at a time even when we don't know the size of the
   5.369 - * area at compile-time..
   5.370 - */
   5.371 -static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
   5.372 -{
   5.373 -int d0, d1;
   5.374 -__asm__ __volatile__(
   5.375 -	"rep ; stosl\n\t"
   5.376 -	"testb $2,%b3\n\t"
   5.377 -	"je 1f\n\t"
   5.378 -	"stosw\n"
   5.379 -	"1:\ttestb $1,%b3\n\t"
   5.380 -	"je 2f\n\t"
   5.381 -	"stosb\n"
   5.382 -	"2:"
   5.383 -	: "=&c" (d0), "=&D" (d1)
   5.384 -	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
   5.385 -	:"memory");
   5.386 -return (s);	
   5.387 -}
   5.388 -
   5.389 -/* Added by Gertjan van Wingerde to make minix and sysv module work */
   5.390 -#define __HAVE_ARCH_STRNLEN
   5.391 -static inline size_t strnlen(const char * s, size_t count)
   5.392 -{
   5.393 -int d0;
   5.394 -register int __res;
   5.395 -__asm__ __volatile__(
   5.396 -	"movl %2,%0\n\t"
   5.397 -	"jmp 2f\n"
   5.398 -	"1:\tcmpb $0,(%0)\n\t"
   5.399 -	"je 3f\n\t"
   5.400 -	"incl %0\n"
   5.401 -	"2:\tdecl %1\n\t"
   5.402 -	"cmpl $-1,%1\n\t"
   5.403 -	"jne 1b\n"
   5.404 -	"3:\tsubl %2,%0"
   5.405 -	:"=a" (__res), "=&d" (d0)
   5.406 -	:"c" (s),"1" (count));
   5.407 -return __res;
   5.408 -}
   5.409 -/* end of additional stuff */
   5.410 -
   5.411 -//#define __HAVE_ARCH_STRSTR
   5.412 -
   5.413 -//extern char *strstr(const char *cs, const char *ct);
   5.414 -
   5.415 -/*
   5.416 - * This looks horribly ugly, but the compiler can optimize it totally,
   5.417 - * as we by now know that both pattern and count is constant..
   5.418 - */
   5.419 -static always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
   5.420 -{
   5.421 -	switch (count) {
   5.422 -		case 0:
   5.423 -			return s;
   5.424 -		case 1:
   5.425 -			*(unsigned char *)s = pattern;
   5.426 -			return s;
   5.427 -		case 2:
   5.428 -			*(unsigned short *)s = pattern;
   5.429 -			return s;
   5.430 -		case 3:
   5.431 -			*(unsigned short *)s = pattern;
   5.432 -			*(2+(unsigned char *)s) = pattern;
   5.433 -			return s;
   5.434 -		case 4:
   5.435 -			*(unsigned long *)s = pattern;
   5.436 -			return s;
   5.437 -	}
   5.438 -#define COMMON(x) \
   5.439 -__asm__  __volatile__( \
   5.440 -	"rep ; stosl" \
   5.441 -	x \
   5.442 -	: "=&c" (d0), "=&D" (d1) \
   5.443 -	: "a" (pattern),"0" (count/4),"1" ((long) s) \
   5.444 -	: "memory")
   5.445 -{
   5.446 -	int d0, d1;
   5.447 -	switch (count % 4) {
   5.448 -		case 0: COMMON(""); return s;
   5.449 -		case 1: COMMON("\n\tstosb"); return s;
   5.450 -		case 2: COMMON("\n\tstosw"); return s;
   5.451 -		default: COMMON("\n\tstosw\n\tstosb"); return s;
   5.452 -	}
   5.453 -}
   5.454 -  
   5.455 -#undef COMMON
   5.456 -}
   5.457 -
   5.458 -#define __constant_c_x_memset(s, c, count) \
   5.459 -(__builtin_constant_p(count) ? \
   5.460 - __constant_c_and_count_memset((s),(c),(count)) : \
   5.461 - __constant_c_memset((s),(c),(count)))
   5.462 -
   5.463 -#define __var_x_memset(s, c, count) \
   5.464 -(__builtin_constant_p(count) ? \
   5.465 - __constant_count_memset((s),(c),(count)) : \
   5.466 - __memset_generic((s),(c),(count)))
   5.467 -
   5.468 -#define __HAVE_ARCH_MEMSET
   5.469 -#define memset(s, c, count) (__memset((s),(c),(count)))
   5.470 -#define __memset(s, c, count) \
   5.471 -(__builtin_constant_p(c) ? \
   5.472 - __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
   5.473 - __var_x_memset((s),(c),(count)))
   5.474 -
   5.475 -/*
   5.476 - * find the first occurrence of byte 'c', or 1 past the area if none
   5.477 - */
   5.478 -#define __HAVE_ARCH_MEMSCAN
   5.479 -static inline void * memscan(void * addr, int c, size_t size)
   5.480 -{
   5.481 -	if (!size)
   5.482 -		return addr;
   5.483 -	__asm__("repnz; scasb\n\t"
   5.484 -		"jnz 1f\n\t"
   5.485 -		"dec %%edi\n"
   5.486 -		"1:"
   5.487 -		: "=D" (addr), "=c" (size)
   5.488 -		: "0" (addr), "1" (size), "a" (c));
   5.489 -	return addr;
   5.490 -}
   5.491 -
   5.492 -#endif
     6.1 --- a/xen/include/asm-x86/x86_64/string.h	Mon Jun 06 10:52:53 2005 +0000
     6.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     6.3 @@ -1,16 +0,0 @@
     6.4 -#ifndef _X86_64_STRING_H_
     6.5 -#define _X86_64_STRING_H_
     6.6 -
     6.7 -#define __HAVE_ARCH_MEMCPY
     6.8 -#define memcpy(t,f,n) (__memcpy((t),(f),(n)))
     6.9 -#define __memcpy(t,f,n) (__builtin_memcpy((t),(f),(n)))
    6.10 -
    6.11 -#define __HAVE_ARCH_MEMSET
    6.12 -#define memset(s, c, count) (__memset((s),(c),(count)))
    6.13 -#define __memset(s, c, count) (__builtin_memset((s),(c),(count)))
    6.14 -
    6.15 -/* Some versions of 64-bit gcc don't have this built in. */
    6.16 -#define __HAVE_ARCH_MEMMOVE
    6.17 -extern void *memmove(void *dest, const void *src, size_t n);
    6.18 -
    6.19 -#endif