ia64/xen-unstable

changeset 3428:e936974c5b7e

bitkeeper revision 1.1159.170.92 (41e64706rQEeEkbb1iQ8WhbAgZNBFQ)

Clean up string functions (memcpy/memset).
author kaf24@scramble.cl.cam.ac.uk
date Thu Jan 13 10:01:42 2005 +0000 (2005-01-13)
parents 983a02b6959a
children cd78cc21face cd93e93dd285
files xen/common/string.c xen/include/asm-x86/x86_32/string.h
line diff
--- a/xen/common/string.c	Wed Jan 12 15:00:14 2005 +0000
+++ b/xen/common/string.c	Thu Jan 13 10:01:42 2005 +0000
@@ -391,6 +391,7 @@ char * bcopy(const char * src, char * de
 }
 #endif
 
+#ifndef __HAVE_ARCH_MEMCPY
 /**
  * memcpy - Copy one area of memory to another
  * @dest: Where to copy to
@@ -400,7 +401,6 @@ char * bcopy(const char * src, char * de
  * You should not use this function to access IO space, use memcpy_toio()
 * or memcpy_fromio() instead.
  */
-#undef memcpy
 void * memcpy(void * dest,const void *src,size_t count)
 {
 	char *tmp = (char *) dest, *s = (char *) src;
@@ -410,6 +410,7 @@ void * memcpy(void * dest,const void *sr
 
 	return dest;
 }
+#endif
 
 #ifndef __HAVE_ARCH_MEMMOVE
 /**
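The string.c half of the change follows the usual kernel convention for arch overrides: the portable byte-copy fallback is compiled only when no architecture header has claimed the symbol by defining __HAVE_ARCH_MEMCPY, and the old "#undef memcpy" hack (needed while memcpy was a macro) goes away. A minimal standalone sketch of that guard pattern, using a hypothetical xen_memcpy name so it does not collide with the C library; the harness around it is purely illustrative:

#include <stddef.h>
#include <stdio.h>
#include <string.h>                /* only for the strcmp check in main() */

/*
 * An architecture header would normally define __HAVE_ARCH_MEMCPY and
 * supply its own optimised copy routine; leaving it undefined here means
 * the portable fallback below gets compiled, as in string.c above.
 * xen_memcpy is a made-up name so the demo does not clash with libc.
 */
#ifndef __HAVE_ARCH_MEMCPY
static void *xen_memcpy(void *dest, const void *src, size_t count)
{
    char *tmp = dest;
    const char *s = src;

    while (count--)                /* plain byte-at-a-time copy */
        *tmp++ = *s++;
    return dest;
}
#endif

int main(void)
{
    char buf[8];

    xen_memcpy(buf, "abc", sizeof("abc"));
    printf("%s\n", strcmp(buf, "abc") == 0 ? "copy ok" : "copy broken");
    return 0;
}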
--- a/xen/include/asm-x86/x86_32/string.h	Wed Jan 12 15:00:14 2005 +0000
+++ b/xen/include/asm-x86/x86_32/string.h	Thu Jan 13 10:01:42 2005 +0000
@@ -206,7 +206,7 @@ return (to);
  * This looks horribly ugly, but the compiler can optimize it totally,
  * as the count is constant.
  */
-static inline void * __constant_memcpy(void * to, const void * from, size_t n)
+static always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
 {
 	switch (n) {
 		case 0:
@@ -272,12 +272,13 @@ static inline void * __constant_memcpy(v
 }
 
 #define __HAVE_ARCH_MEMCPY
-
-#define memcpy(t, f, n) \
-(__builtin_constant_p(n) ? \
- __constant_memcpy((t),(f),(n)) : \
- __memcpy((t),(f),(n)))
-
+static always_inline __attribute_used__
+void memcpy(void *t, const void *f, size_t n)
+{
+	(__builtin_constant_p(n) ?
+	 __constant_memcpy((t),(f),(n)) :
+	 __memcpy((t),(f),(n)));
+}
 
 /*
  * struct_cpy(x,y), copy structure *x into (matching structure) *y.
@@ -410,7 +411,7 @@ return __res;
  * This looks horribly ugly, but the compiler can optimize it totally,
  * as we by now know that both pattern and count is constant..
  */
-static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
+static always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
 {
 	switch (count) {
 		case 0:
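In the x86_32 header, the commit turns the memcpy macro into a static always_inline wrapper that still picks between __constant_memcpy (the big switch shown above, fully expanded when the length is a compile-time constant) and the assembly __memcpy, and it forces __constant_memcpy and __constant_c_and_count_memset to be inlined so that expansion is not lost if the compiler declines an ordinary inline. A rough, self-contained sketch of the dispatch idiom follows; the demo_* names and the fallback bodies are placeholders, and unlike the committed wrapper this one returns the destination pointer in the usual memcpy style:

#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Approximation of Xen's always_inline macro (the real one lives in the
 * tree's compiler headers). */
#define demo_always_inline inline __attribute__((always_inline))

/*
 * Placeholders for the two real implementations: __memcpy is the
 * rep-movs assembly routine and __constant_memcpy is the switch on n
 * from string.h.  Both collapse to plain memcpy here purely so the
 * sketch compiles on its own.
 */
static demo_always_inline void *demo_arch_memcpy(void *to, const void *from, size_t n)
{
    return memcpy(to, from, n);
}

static demo_always_inline void *demo_constant_memcpy(void *to, const void *from, size_t n)
{
    return memcpy(to, from, n);
}

/*
 * The dispatch idiom from the diff, written as a function rather than a
 * macro: once the wrapper is inlined, __builtin_constant_p(n) folds to a
 * constant at each call site and the optimiser keeps only one arm of the
 * ternary.
 */
static demo_always_inline void *demo_memcpy(void *t, const void *f, size_t n)
{
    return __builtin_constant_p(n) ?
        demo_constant_memcpy(t, f, n) :
        demo_arch_memcpy(t, f, n);
}

int main(void)
{
    char dst[16];

    /* The length is a compile-time constant, so with optimisation enabled
     * this call is expected to resolve to the constant-size path. */
    demo_memcpy(dst, "constant length", sizeof("constant length"));
    puts(dst);
    return 0;
}

With optimisation on, a call whose length argument is a literal or sizeof expression should take the constant-size branch, which is the point of both the original macro and the new inline wrapper.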