debuggers.hg
changeset 16430:2052364cb456
x86: Clean up some files of inline asm, and fix
__copy_{to,from}_user_ll() for gcc 3.4 (asm output constraints all
need to be '=&').
Based on a patch by Jan Beulich.
Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author    Keir Fraser <keir.fraser@citrix.com>
date      Fri Nov 16 17:59:34 2007 +0000 (2007-11-16)
parents   5a72a99be911
children  68c911f7733a
files     xen/arch/x86/string.c xen/arch/x86/usercopy.c
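
The '=&' in the summary is GCC's earlyclobber modifier: it tells the compiler that the asm writes the output operand before it has finished consuming its inputs, so the output must not share a register with any input. gcc 3.4's register allocator actually exploits the absence of '&', which is what broke __copy_{to,from}_user_ll(). A minimal standalone sketch of the failure mode (the twice() helper is hypothetical and not part of this changeset; builds with GCC on x86/x86-64):

    #include <stdio.h>

    /* Sketch only: this asm writes its output (%0) before the last read
     * of its input (%1).  With a plain "=r" constraint the compiler may
     * assign both operands to the same register, and the initial write
     * destroys the input; "=&r" (earlyclobber) forbids the sharing. */
    static long twice(long in)
    {
        long out;
        asm ( "mov $0, %0\n\t"   /* %0 written first...          */
              "add %1, %0\n\t"   /* ...while %1 is still live...  */
              "add %1, %0"       /* ...and read again here        */
              : "=&r" (out)
              : "r" (in) );
        return out;
    }

    int main(void)
    {
        printf("%ld\n", twice(21));   /* prints 42 */
        return 0;
    }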
line diff
--- a/xen/arch/x86/string.c    Fri Nov 16 17:09:53 2007 +0000
+++ b/xen/arch/x86/string.c    Fri Nov 16 17:59:34 2007 +0000
@@ -13,7 +13,7 @@ void *memcpy(void *dest, const void *src
 {
     long d0, d1, d2;
 
-    __asm__ __volatile__ (
+    asm volatile (
 #ifdef __i386__
         " rep movsl ; "
 #else
@@ -42,7 +42,7 @@ void *memset(void *s, int c, size_t n)
 {
     long d0, d1;
 
-    __asm__ __volatile__ (
+    asm volatile (
         "rep stosb"
         : "=&c" (d0), "=&D" (d1)
         : "a" (c), "1" (s), "0" (n)
@@ -59,7 +59,7 @@ void *memmove(void *dest, const void *sr
     if ( dest < src )
         return memcpy(dest, src, n);
 
-    __asm__ __volatile__ (
+    asm volatile (
         " std ; "
         " rep movsb ; "
         " cld "
@@ -69,3 +69,13 @@ void *memmove(void *dest, const void *sr
 
     return dest;
 }
+
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
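
The operand lists in memcpy() and memset() above rely on GCC matching constraints: an input written as "0" is forced into the same register as output operand %0, which is how the count, destination and source land in ECX/EDI/ESI for the string instructions, while '=&' marks each of those registers as modified while the asm executes. A standalone x86-64 sketch of the same pattern (the word_copy() helper is hypothetical, not from the Xen tree):

    #include <stdio.h>

    /* Sketch, x86-64 only: "0"/"1"/"2" tie the inputs to the registers
     * of outputs %0..%2 (RCX/RDI/RSI), and "=&" marks each register as
     * clobbered early, since rep movsq updates all three as it runs. */
    static void *word_copy(void *dest, const void *src, unsigned long words)
    {
        long d0, d1, d2;
        asm volatile ( "rep movsq"
                       : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                       : "0" (words), "1" (dest), "2" (src)
                       : "memory" );
        return dest;
    }

    int main(void)
    {
        unsigned long a[4] = { 1, 2, 3, 4 }, b[4] = { 0 };
        word_copy(b, a, 4);
        printf("%lu %lu %lu %lu\n", b[0], b[1], b[2], b[3]);  /* 1 2 3 4 */
        return 0;
    }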
--- a/xen/arch/x86/usercopy.c    Fri Nov 16 17:09:53 2007 +0000
+++ b/xen/arch/x86/usercopy.c    Fri Nov 16 17:59:34 2007 +0000
@@ -12,83 +12,87 @@
 
 unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned n)
 {
-        unsigned long __d0, __d1, __d2, __n = n;
-        __asm__ __volatile__(
-                " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
-                " jbe 1f\n"
-                " mov %1,%0\n"
-                " neg %0\n"
-                " and $"STR(BYTES_PER_LONG-1)",%0\n"
-                " sub %0,%3\n"
-                "4: rep; movsb\n" /* make 'to' address aligned */
-                " mov %3,%0\n"
-                " shr $"STR(LONG_BYTEORDER)",%0\n"
-                " and $"STR(BYTES_PER_LONG-1)",%3\n"
-                " .align 2,0x90\n"
-                "0: rep; movs"__OS"\n" /* as many words as possible... */
-                " mov %3,%0\n"
-                "1: rep; movsb\n" /* ...remainder copied as bytes */
-                "2:\n"
-                ".section .fixup,\"ax\"\n"
-                "5: add %3,%0\n"
-                " jmp 2b\n"
-                "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
-                " jmp 2b\n"
-                ".previous\n"
-                ".section __ex_table,\"a\"\n"
-                " "__FIXUP_ALIGN"\n"
-                " "__FIXUP_WORD" 4b,5b\n"
-                " "__FIXUP_WORD" 0b,3b\n"
-                " "__FIXUP_WORD" 1b,2b\n"
-                ".previous"
-                : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
-                : "3"(__n), "0"(__n), "1"(to), "2"(from)
-                : "memory");
-        return (unsigned)__n;
+    unsigned long __d0, __d1, __d2, __n = n;
+
+    asm volatile (
+        " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
+        " jbe 1f\n"
+        " mov %1,%0\n"
+        " neg %0\n"
+        " and $"STR(BYTES_PER_LONG-1)",%0\n"
+        " sub %0,%3\n"
+        "4: rep movsb\n" /* make 'to' address aligned */
+        " mov %3,%0\n"
+        " shr $"STR(LONG_BYTEORDER)",%0\n"
+        " and $"STR(BYTES_PER_LONG-1)",%3\n"
+        " .align 2,0x90\n"
+        "0: rep movs"__OS"\n" /* as many words as possible... */
+        " mov %3,%0\n"
+        "1: rep movsb\n" /* ...remainder copied as bytes */
+        "2:\n"
+        ".section .fixup,\"ax\"\n"
+        "5: add %3,%0\n"
+        " jmp 2b\n"
+        "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+        " jmp 2b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        " "__FIXUP_ALIGN"\n"
+        " "__FIXUP_WORD" 4b,5b\n"
+        " "__FIXUP_WORD" 0b,3b\n"
+        " "__FIXUP_WORD" 1b,2b\n"
+        ".previous"
+        : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+        : "0" (__n), "1" (to), "2" (from), "3" (__n)
+        : "memory" );
+
+    return __n;
 }
 
 unsigned long
 __copy_from_user_ll(void *to, const void __user *from, unsigned n)
 {
-        unsigned long __d0, __d1, __d2, __n = n;
-        __asm__ __volatile__(
-                " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
-                " jbe 1f\n"
-                " mov %1,%0\n"
-                " neg %0\n"
-                " and $"STR(BYTES_PER_LONG-1)",%0\n"
-                " sub %0,%3\n"
-                "4: rep; movsb\n" /* make 'to' address aligned */
-                " mov %3,%0\n"
-                " shr $"STR(LONG_BYTEORDER)",%0\n"
-                " and $"STR(BYTES_PER_LONG-1)",%3\n"
-                " .align 2,0x90\n"
-                "0: rep; movs"__OS"\n" /* as many words as possible... */
-                " mov %3,%0\n"
-                "1: rep; movsb\n" /* ...remainder copied as bytes */
-                "2:\n"
-                ".section .fixup,\"ax\"\n"
-                "5: add %3,%0\n"
-                " jmp 6f\n"
-                "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
-                "6: push %0\n"
-                " push %%"__OP"ax\n"
-                " xor %%eax,%%eax\n"
-                " rep; stosb\n"
-                " pop %%"__OP"ax\n"
-                " pop %0\n"
-                " jmp 2b\n"
-                ".previous\n"
-                ".section __ex_table,\"a\"\n"
-                " "__FIXUP_ALIGN"\n"
-                " "__FIXUP_WORD" 4b,5b\n"
-                " "__FIXUP_WORD" 0b,3b\n"
-                " "__FIXUP_WORD" 1b,6b\n"
-                ".previous"
-                : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
-                : "3"(__n), "0"(__n), "1"(to), "2"(from)
-                : "memory");
-        return (unsigned)__n;
+    unsigned long __d0, __d1, __d2, __n = n;
+
+    asm volatile (
+        " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
+        " jbe 1f\n"
+        " mov %1,%0\n"
+        " neg %0\n"
+        " and $"STR(BYTES_PER_LONG-1)",%0\n"
+        " sub %0,%3\n"
+        "4: rep; movsb\n" /* make 'to' address aligned */
+        " mov %3,%0\n"
+        " shr $"STR(LONG_BYTEORDER)",%0\n"
+        " and $"STR(BYTES_PER_LONG-1)",%3\n"
+        " .align 2,0x90\n"
+        "0: rep; movs"__OS"\n" /* as many words as possible... */
+        " mov %3,%0\n"
+        "1: rep; movsb\n" /* ...remainder copied as bytes */
+        "2:\n"
+        ".section .fixup,\"ax\"\n"
+        "5: add %3,%0\n"
+        " jmp 6f\n"
+        "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+        "6: push %0\n"
+        " push %%"__OP"ax\n"
+        " xor %%eax,%%eax\n"
+        " rep; stosb\n"
+        " pop %%"__OP"ax\n"
+        " pop %0\n"
+        " jmp 2b\n"
+        ".previous\n"
+        ".section __ex_table,\"a\"\n"
+        " "__FIXUP_ALIGN"\n"
+        " "__FIXUP_WORD" 4b,5b\n"
+        " "__FIXUP_WORD" 0b,3b\n"
+        " "__FIXUP_WORD" 1b,6b\n"
+        ".previous"
+        : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+        : "0" (__n), "1" (to), "2" (from), "3" (__n)
+        : "memory" );
+
+    return __n;
 }
 
 /**
@@ -107,9 +111,9 @@ unsigned long
 unsigned long
 copy_to_user(void __user *to, const void *from, unsigned n)
 {
-        if (access_ok(to, n))
-                n = __copy_to_user(to, from, n);
-        return n;
+    if ( access_ok(to, n) )
+        n = __copy_to_user(to, from, n);
+    return n;
 }
 
 /**
@@ -131,9 +135,19 @@ copy_to_user(void __user *to, const void
 unsigned long
 copy_from_user(void *to, const void __user *from, unsigned n)
 {
-        if (access_ok(from, n))
-                n = __copy_from_user(to, from, n);
-        else
-                memset(to, 0, n);
-        return n;
+    if ( access_ok(from, n) )
+        n = __copy_from_user(to, from, n);
+    else
+        memset(to, 0, n);
+    return n;
 }
+
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
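
Both entry points keep the usual return convention: the result is the number of bytes left uncopied, so zero means complete success, and the .fixup path of __copy_from_user_ll() zero-fills the unread tail of the destination before returning. A hypothetical caller in Xen coding style (fetch_guest_buf() is illustrative only, not in the tree):

    /* Sketch: treat any nonzero return as a partial copy and fail with
     * -EFAULT; on that path the destination tail has already been zeroed
     * by the fixup code above. */
    static int fetch_guest_buf(void *dst, const void __user *src, unsigned len)
    {
        if ( copy_from_user(dst, src, len) != 0 )
            return -EFAULT;
        return 0;
    }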