debuggers.hg

changeset 22674:2762b6d3149c

x86-64: use PC-relative exception table entries

... thus allowing the entries to be made half their current size. Rather
than adjusting all instances to the new layout, abstract the
construction of the table entries via a macro (paralleling a similar
one in recent Linux).

Also change the name of the section (to allow easier detection of
missed cases) and merge the final resulting output sections into
.data.read_mostly.

Signed-off-by: Jan Beulich <jbeulich@novell.com>
author Keir Fraser <keir@xen.org>
date Fri Dec 24 08:47:23 2010 +0000 (2010-12-24)
parents ef30046259f0
children 3daa79a074b3
files xen/arch/x86/cpu/amd.c xen/arch/x86/domain.c xen/arch/x86/extable.c xen/arch/x86/i387.c xen/arch/x86/usercopy.c xen/arch/x86/x86_32/asm-offsets.c xen/arch/x86/x86_32/entry.S xen/arch/x86/x86_64/asm-offsets.c xen/arch/x86/x86_64/compat/entry.S xen/arch/x86/x86_64/entry.S xen/arch/x86/x86_64/mm.c xen/arch/x86/xen.lds.S xen/include/asm-x86/asm_defns.h xen/include/asm-x86/config.h xen/include/asm-x86/hvm/vmx/vmx.h xen/include/asm-x86/msr.h xen/include/asm-x86/uaccess.h xen/include/asm-x86/x86_32/asm_defns.h xen/include/asm-x86/x86_32/system.h xen/include/asm-x86/x86_32/uaccess.h xen/include/asm-x86/x86_64/asm_defns.h xen/include/asm-x86/x86_64/system.h
line diff
     1.1 --- a/xen/arch/x86/cpu/amd.c	Fri Dec 24 08:46:46 2010 +0000
     1.2 +++ b/xen/arch/x86/cpu/amd.c	Fri Dec 24 08:47:23 2010 +0000
     1.3 @@ -53,10 +53,7 @@ static inline int rdmsr_amd_safe(unsigne
     1.4  		     "3: movl %6,%2\n"
     1.5  		     "   jmp 2b\n"
     1.6  		     ".previous\n"
     1.7 -		     ".section __ex_table,\"a\"\n"
     1.8 -		     __FIXUP_ALIGN "\n"
     1.9 -		     __FIXUP_WORD " 1b,3b\n"
    1.10 -		     ".previous\n"
    1.11 +		     _ASM_EXTABLE(1b, 3b)
    1.12  		     : "=a" (*lo), "=d" (*hi), "=r" (err)
    1.13  		     : "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT));
    1.14  
    1.15 @@ -73,10 +70,7 @@ static inline int wrmsr_amd_safe(unsigne
    1.16  		     "3: movl %6,%0\n"
    1.17  		     "   jmp 2b\n"
    1.18  		     ".previous\n"
    1.19 -		     ".section __ex_table,\"a\"\n"
    1.20 -		     __FIXUP_ALIGN "\n"
    1.21 -		     __FIXUP_WORD " 1b,3b\n"
    1.22 -		     ".previous\n"
    1.23 +		     _ASM_EXTABLE(1b, 3b)
    1.24  		     : "=r" (err)
    1.25  		     : "c" (msr), "a" (lo), "d" (hi), "D" (0x9c5a203a),
    1.26  		       "0" (0), "i" (-EFAULT));
     2.1 --- a/xen/arch/x86/domain.c	Fri Dec 24 08:46:46 2010 +0000
     2.2 +++ b/xen/arch/x86/domain.c	Fri Dec 24 08:47:23 2010 +0000
     2.3 @@ -1070,10 +1070,7 @@ arch_do_vcpu_op(
     2.4          "   movl %k0,%%" #seg "\n"              \
     2.5          "   jmp 2b\n"                           \
     2.6          ".previous\n"                           \
     2.7 -        ".section __ex_table,\"a\"\n"           \
     2.8 -        "   .align 8\n"                         \
     2.9 -        "   .quad 1b,3b\n"                      \
    2.10 -        ".previous"                             \
    2.11 +        _ASM_EXTABLE(1b, 3b)                    \
    2.12          : "=r" (__r) : "r" (value), "0" (__r) );\
    2.13      __r; })
    2.14  
     3.1 --- a/xen/arch/x86/extable.c	Fri Dec 24 08:46:46 2010 +0000
     3.2 +++ b/xen/arch/x86/extable.c	Fri Dec 24 08:47:23 2010 +0000
     3.3 @@ -2,6 +2,7 @@
     3.4  #include <xen/config.h>
     3.5  #include <xen/init.h>
     3.6  #include <xen/perfc.h>
     3.7 +#include <xen/sort.h>
     3.8  #include <xen/spinlock.h>
     3.9  #include <asm/uaccess.h>
    3.10  
    3.11 @@ -10,29 +11,58 @@ extern struct exception_table_entry __st
    3.12  extern struct exception_table_entry __start___pre_ex_table[];
    3.13  extern struct exception_table_entry __stop___pre_ex_table[];
    3.14  
    3.15 -static void __init sort_exception_table(struct exception_table_entry *start,
    3.16 -                                        struct exception_table_entry *end)
    3.17 +#ifdef __i386__
    3.18 +#define EX_FIELD(ptr, field) (ptr)->field
    3.19 +#define swap_ex NULL
    3.20 +#else
    3.21 +#define EX_FIELD(ptr, field) ((unsigned long)&(ptr)->field + (ptr)->field)
    3.22 +#endif
    3.23 +
    3.24 +static inline unsigned long ex_addr(const struct exception_table_entry *x)
    3.25  {
    3.26 -    struct exception_table_entry *p, *q, tmp;
    3.27 +	return EX_FIELD(x, addr);
    3.28 +}
    3.29 +
    3.30 +static inline unsigned long ex_cont(const struct exception_table_entry *x)
    3.31 +{
    3.32 +	return EX_FIELD(x, cont);
    3.33 +}
    3.34  
    3.35 -    for ( p = start; p < end; p++ )
    3.36 -    {
    3.37 -        for ( q = p-1; q > start; q-- )
    3.38 -            if ( p->insn > q->insn )
    3.39 -                break;
    3.40 -        if ( ++q != p )
    3.41 -        {
    3.42 -            tmp = *p;
    3.43 -            memmove(q+1, q, (p-q)*sizeof(*p));
    3.44 -            *q = tmp;
    3.45 -        }
    3.46 -    }
    3.47 +static int __init cmp_ex(const void *a, const void *b)
    3.48 +{
    3.49 +	const struct exception_table_entry *l = a, *r = b;
    3.50 +	unsigned long lip = ex_addr(l);
    3.51 +	unsigned long rip = ex_addr(r);
    3.52 +
    3.53 +	/* avoid overflow */
    3.54 +	if (lip > rip)
    3.55 +		return 1;
    3.56 +	if (lip < rip)
    3.57 +		return -1;
    3.58 +	return 0;
    3.59  }
    3.60  
    3.61 +#ifndef swap_ex
    3.62 +static void __init swap_ex(void *a, void *b, int size)
    3.63 +{
    3.64 +	struct exception_table_entry *l = a, *r = b, tmp;
    3.65 +	long delta = b - a;
    3.66 +
    3.67 +	tmp = *l;
    3.68 +	l->addr = r->addr + delta;
    3.69 +	l->cont = r->cont + delta;
    3.70 +	r->addr = tmp.addr - delta;
    3.71 +	r->cont = tmp.cont - delta;
    3.72 +}
    3.73 +#endif
    3.74 +
    3.75  void __init sort_exception_tables(void)
    3.76  {
    3.77 -    sort_exception_table(__start___ex_table, __stop___ex_table);
    3.78 -    sort_exception_table(__start___pre_ex_table, __stop___pre_ex_table);
    3.79 +    sort(__start___ex_table, __stop___ex_table - __start___ex_table,
    3.80 +         sizeof(struct exception_table_entry), cmp_ex, swap_ex);
    3.81 +    sort(__start___pre_ex_table,
    3.82 +         __stop___pre_ex_table - __start___pre_ex_table,
    3.83 +         sizeof(struct exception_table_entry), cmp_ex, swap_ex);
    3.84  }
    3.85  
    3.86  static inline unsigned long
    3.87 @@ -46,9 +76,9 @@ search_one_table(const struct exception_
    3.88      while ( first <= last )
    3.89      {
    3.90          mid = (last - first) / 2 + first;
    3.91 -        diff = mid->insn - value;
    3.92 +        diff = ex_addr(mid) - value;
    3.93          if (diff == 0)
    3.94 -            return mid->fixup;
    3.95 +            return ex_cont(mid);
    3.96          else if (diff < 0)
    3.97              first = mid+1;
    3.98          else
     4.1 --- a/xen/arch/x86/i387.c	Fri Dec 24 08:46:46 2010 +0000
     4.2 +++ b/xen/arch/x86/i387.c	Fri Dec 24 08:47:23 2010 +0000
     4.3 @@ -122,10 +122,7 @@ void restore_fpu(struct vcpu *v)
     4.4              "   pop  %%"__OP"ax       \n"
     4.5              "   jmp  1b               \n"
     4.6              ".previous                \n"
     4.7 -            ".section __ex_table,\"a\"\n"
     4.8 -            "   "__FIXUP_ALIGN"       \n"
     4.9 -            "   "__FIXUP_WORD" 1b,2b  \n"
    4.10 -            ".previous                \n"
    4.11 +            _ASM_EXTABLE(1b, 2b)
    4.12              : 
    4.13              : "m" (*fpu_ctxt),
    4.14                "i" (sizeof(v->arch.guest_context.fpu_ctxt)/4)
     5.1 --- a/xen/arch/x86/usercopy.c	Fri Dec 24 08:46:46 2010 +0000
     5.2 +++ b/xen/arch/x86/usercopy.c	Fri Dec 24 08:47:23 2010 +0000
     5.3 @@ -36,12 +36,9 @@ unsigned long __copy_to_user_ll(void __u
     5.4          "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
     5.5          "    jmp 2b\n"
     5.6          ".previous\n"
     5.7 -        ".section __ex_table,\"a\"\n"
     5.8 -        "    "__FIXUP_ALIGN"\n"
     5.9 -        "    "__FIXUP_WORD" 4b,5b\n"
    5.10 -        "    "__FIXUP_WORD" 0b,3b\n"
    5.11 -        "    "__FIXUP_WORD" 1b,2b\n"
    5.12 -        ".previous"
    5.13 +        _ASM_EXTABLE(4b, 5b)
    5.14 +        _ASM_EXTABLE(0b, 3b)
    5.15 +        _ASM_EXTABLE(1b, 2b)
    5.16          : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
    5.17          : "0" (__n), "1" (to), "2" (from), "3" (__n)
    5.18          : "memory" );
    5.19 @@ -82,12 +79,9 @@ unsigned long
    5.20          "    pop  %0\n"
    5.21          "    jmp 2b\n"
    5.22          ".previous\n"
    5.23 -        ".section __ex_table,\"a\"\n"
    5.24 -        "    "__FIXUP_ALIGN"\n"
    5.25 -        "    "__FIXUP_WORD" 4b,5b\n"
    5.26 -        "    "__FIXUP_WORD" 0b,3b\n"
    5.27 -        "    "__FIXUP_WORD" 1b,6b\n"
    5.28 -        ".previous"
    5.29 +        _ASM_EXTABLE(4b, 5b)
    5.30 +        _ASM_EXTABLE(0b, 3b)
    5.31 +        _ASM_EXTABLE(1b, 6b)
    5.32          : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
    5.33          : "0" (__n), "1" (to), "2" (from), "3" (__n)
    5.34          : "memory" );
     6.1 --- a/xen/arch/x86/x86_32/asm-offsets.c	Fri Dec 24 08:46:46 2010 +0000
     6.2 +++ b/xen/arch/x86/x86_32/asm-offsets.c	Fri Dec 24 08:47:23 2010 +0000
     6.3 @@ -3,6 +3,7 @@
     6.4   * This code generates raw asm output which is post-processed
     6.5   * to extract and format the required data.
     6.6   */
     6.7 +#define COMPILE_OFFSETS
     6.8  
     6.9  #include <xen/config.h>
    6.10  #include <xen/perfc.h>
     7.1 --- a/xen/arch/x86/x86_32/entry.S	Fri Dec 24 08:46:46 2010 +0000
     7.2 +++ b/xen/arch/x86/x86_32/entry.S	Fri Dec 24 08:47:23 2010 +0000
     7.3 @@ -119,16 +119,12 @@ 1:      call  create_bounce_frame
     7.4          movl  %eax,UREGS_gs(%esp)
     7.5          jmp   test_all_events
     7.6  .previous
     7.7 -.section __pre_ex_table,"a"
     7.8 -        .long .Lft1,.Lfx1
     7.9 -        .long .Lft2,.Lfx1
    7.10 -        .long .Lft3,.Lfx1
    7.11 -        .long .Lft4,.Lfx1
    7.12 -        .long .Lft5,.Lfx1
    7.13 -.previous
    7.14 -.section __ex_table,"a"
    7.15 -        .long .Ldf1,failsafe_callback
    7.16 -.previous
    7.17 +        _ASM_PRE_EXTABLE(.Lft1, .Lfx1)
    7.18 +        _ASM_PRE_EXTABLE(.Lft2, .Lfx1)
    7.19 +        _ASM_PRE_EXTABLE(.Lft3, .Lfx1)
    7.20 +        _ASM_PRE_EXTABLE(.Lft4, .Lfx1)
    7.21 +        _ASM_PRE_EXTABLE(.Lft5, .Lfx1)
    7.22 +        _ASM_EXTABLE(.Ldf1, failsafe_callback)
    7.23  
    7.24          ALIGN
    7.25  restore_all_xen:
    7.26 @@ -392,18 +388,26 @@ UNLIKELY_END(bounce_vm86_3)
    7.27          movl TRAPBOUNCE_eip(%edx),%eax
    7.28          movl %eax,UREGS_eip+4(%esp)
    7.29          ret
    7.30 -.section __ex_table,"a"
    7.31 -        .long  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
    7.32 -        .long  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
    7.33 -        .long .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
    7.34 -        .long .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
    7.35 -        .long .Lft14,domain_crash_synchronous , .Lft15,domain_crash_synchronous
    7.36 -        .long .Lft16,domain_crash_synchronous , .Lft17,domain_crash_synchronous
    7.37 -        .long .Lft18,domain_crash_synchronous , .Lft19,domain_crash_synchronous
    7.38 -        .long .Lft20,domain_crash_synchronous , .Lft21,domain_crash_synchronous
    7.39 -        .long .Lft22,domain_crash_synchronous , .Lft23,domain_crash_synchronous
    7.40 -        .long .Lft24,domain_crash_synchronous , .Lft25,domain_crash_synchronous
    7.41 -.previous
    7.42 +        _ASM_EXTABLE(.Lft6,  domain_crash_synchronous)
    7.43 +        _ASM_EXTABLE(.Lft7,  domain_crash_synchronous)
    7.44 +        _ASM_EXTABLE(.Lft8,  domain_crash_synchronous)
    7.45 +        _ASM_EXTABLE(.Lft9,  domain_crash_synchronous)
    7.46 +        _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
    7.47 +        _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
    7.48 +        _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
    7.49 +        _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
    7.50 +        _ASM_EXTABLE(.Lft14, domain_crash_synchronous)
    7.51 +        _ASM_EXTABLE(.Lft15, domain_crash_synchronous)
    7.52 +        _ASM_EXTABLE(.Lft16, domain_crash_synchronous)
    7.53 +        _ASM_EXTABLE(.Lft17, domain_crash_synchronous)
    7.54 +        _ASM_EXTABLE(.Lft18, domain_crash_synchronous)
    7.55 +        _ASM_EXTABLE(.Lft19, domain_crash_synchronous)
    7.56 +        _ASM_EXTABLE(.Lft20, domain_crash_synchronous)
    7.57 +        _ASM_EXTABLE(.Lft21, domain_crash_synchronous)
    7.58 +        _ASM_EXTABLE(.Lft22, domain_crash_synchronous)
    7.59 +        _ASM_EXTABLE(.Lft23, domain_crash_synchronous)
    7.60 +        _ASM_EXTABLE(.Lft24, domain_crash_synchronous)
    7.61 +        _ASM_EXTABLE(.Lft25, domain_crash_synchronous)
    7.62  
    7.63  domain_crash_synchronous_string:
    7.64          .asciz "domain_crash_sync called from entry.S (%lx)\n"
     8.1 --- a/xen/arch/x86/x86_64/asm-offsets.c	Fri Dec 24 08:46:46 2010 +0000
     8.2 +++ b/xen/arch/x86/x86_64/asm-offsets.c	Fri Dec 24 08:47:23 2010 +0000
     8.3 @@ -3,6 +3,7 @@
     8.4   * This code generates raw asm output which is post-processed
     8.5   * to extract and format the required data.
     8.6   */
     8.7 +#define COMPILE_OFFSETS
     8.8  
     8.9  #include <xen/config.h>
    8.10  #include <xen/perfc.h>
     9.1 --- a/xen/arch/x86/x86_64/compat/entry.S	Fri Dec 24 08:46:46 2010 +0000
     9.2 +++ b/xen/arch/x86/x86_64/compat/entry.S	Fri Dec 24 08:47:23 2010 +0000
     9.3 @@ -197,12 +197,8 @@ compat_failsafe_callback:
     9.4  1:      call  compat_create_bounce_frame
     9.5          jmp   compat_test_all_events
     9.6  .previous
     9.7 -.section __pre_ex_table,"a"
     9.8 -	.quad .Lft0,.Lfx0
     9.9 -.previous
    9.10 -.section __ex_table,"a"
    9.11 -        .quad .Ldf0,compat_failsafe_callback
    9.12 -.previous
    9.13 +        _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
    9.14 +        _ASM_EXTABLE(.Ldf0, compat_failsafe_callback)
    9.15  
    9.16  /* %rdx: trap_bounce, %rbx: struct vcpu */
    9.17  ENTRY(compat_post_handle_exception)
    9.18 @@ -330,15 +326,19 @@ UNLIKELY_END(compat_bounce_failsafe)
    9.19          xorl  %edi,%edi
    9.20          jmp   .Lft13
    9.21  .previous
    9.22 -.section __ex_table,"a"
    9.23 -        .quad  .Lft1,domain_crash_synchronous  ,  .Lft2,compat_crash_page_fault
    9.24 -        .quad  .Lft3,compat_crash_page_fault_4 ,  .Lft4,domain_crash_synchronous
    9.25 -        .quad  .Lft5,compat_crash_page_fault_4 ,  .Lft6,compat_crash_page_fault_8
    9.26 -        .quad  .Lft7,compat_crash_page_fault   ,  .Lft8,compat_crash_page_fault
    9.27 -        .quad  .Lft9,compat_crash_page_fault_12, .Lft10,compat_crash_page_fault_8
    9.28 -        .quad .Lft11,compat_crash_page_fault_4 , .Lft12,compat_crash_page_fault
    9.29 -        .quad .Lft13,.Lfx13
    9.30 -.previous
    9.31 +        _ASM_EXTABLE(.Lft1,  domain_crash_synchronous)
    9.32 +        _ASM_EXTABLE(.Lft2,  compat_crash_page_fault)
    9.33 +        _ASM_EXTABLE(.Lft3,  compat_crash_page_fault_4)
    9.34 +        _ASM_EXTABLE(.Lft4,  domain_crash_synchronous)
    9.35 +        _ASM_EXTABLE(.Lft5,  compat_crash_page_fault_4)
    9.36 +        _ASM_EXTABLE(.Lft6,  compat_crash_page_fault_8)
    9.37 +        _ASM_EXTABLE(.Lft7,  compat_crash_page_fault)
    9.38 +        _ASM_EXTABLE(.Lft8,  compat_crash_page_fault)
    9.39 +        _ASM_EXTABLE(.Lft9,  compat_crash_page_fault_12)
    9.40 +        _ASM_EXTABLE(.Lft10, compat_crash_page_fault_8)
    9.41 +        _ASM_EXTABLE(.Lft11, compat_crash_page_fault_4)
    9.42 +        _ASM_EXTABLE(.Lft12, compat_crash_page_fault)
    9.43 +        _ASM_EXTABLE(.Lft13, .Lfx13)
    9.44  
    9.45  compat_crash_page_fault_12:
    9.46          addl  $4,%esi
    9.47 @@ -356,9 +356,7 @@ compat_crash_page_fault:
    9.48          xorl  %edi,%edi
    9.49          jmp   .Lft14
    9.50  .previous
    9.51 -.section __ex_table,"a"
    9.52 -        .quad .Lft14,.Lfx14
    9.53 -.previous
    9.54 +        _ASM_EXTABLE(.Lft14, .Lfx14)
    9.55  
    9.56  .section .rodata, "a", @progbits
    9.57  
    10.1 --- a/xen/arch/x86/x86_64/entry.S	Fri Dec 24 08:46:46 2010 +0000
    10.2 +++ b/xen/arch/x86/x86_64/entry.S	Fri Dec 24 08:47:23 2010 +0000
    10.3 @@ -84,12 +84,8 @@ failsafe_callback:
    10.4  1:      call  create_bounce_frame
    10.5          jmp   test_all_events
    10.6  .previous
    10.7 -.section __pre_ex_table,"a"
    10.8 -        .quad .Lft0,.Lfx0
    10.9 -.previous
   10.10 -.section __ex_table,"a"
   10.11 -        .quad .Ldf0,failsafe_callback
   10.12 -.previous
   10.13 +        _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
   10.14 +        _ASM_EXTABLE(.Ldf0, failsafe_callback)
   10.15  
   10.16          ALIGN
   10.17  /* No special register assumptions. */
   10.18 @@ -412,14 +408,18 @@ UNLIKELY_END(bounce_failsafe)
   10.19          jz    domain_crash_synchronous
   10.20          movq  %rax,UREGS_rip+8(%rsp)
   10.21          ret
   10.22 -.section __ex_table,"a"
   10.23 -        .quad  .Lft2,domain_crash_synchronous ,  .Lft3,domain_crash_synchronous
   10.24 -        .quad  .Lft4,domain_crash_synchronous ,  .Lft5,domain_crash_synchronous
   10.25 -        .quad  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
   10.26 -        .quad  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
   10.27 -        .quad .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
   10.28 -        .quad .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
   10.29 -.previous
   10.30 +        _ASM_EXTABLE(.Lft2,  domain_crash_synchronous)
   10.31 +        _ASM_EXTABLE(.Lft3,  domain_crash_synchronous)
   10.32 +        _ASM_EXTABLE(.Lft4,  domain_crash_synchronous)
   10.33 +        _ASM_EXTABLE(.Lft5,  domain_crash_synchronous)
   10.34 +        _ASM_EXTABLE(.Lft6,  domain_crash_synchronous)
   10.35 +        _ASM_EXTABLE(.Lft7,  domain_crash_synchronous)
   10.36 +        _ASM_EXTABLE(.Lft8,  domain_crash_synchronous)
   10.37 +        _ASM_EXTABLE(.Lft9,  domain_crash_synchronous)
   10.38 +        _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
   10.39 +        _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
   10.40 +        _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
   10.41 +        _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
   10.42  
   10.43  domain_crash_synchronous_string:
   10.44          .asciz "domain_crash_sync called from entry.S\n"
    11.1 --- a/xen/arch/x86/x86_64/mm.c	Fri Dec 24 08:46:46 2010 +0000
    11.2 +++ b/xen/arch/x86/x86_64/mm.c	Fri Dec 24 08:47:23 2010 +0000
    11.3 @@ -1119,10 +1119,7 @@ long do_set_segment_base(unsigned int wh
    11.4              "2:   xorl %k0,%k0        \n"
    11.5              "     jmp  1b             \n"
    11.6              ".previous                \n"
    11.7 -            ".section __ex_table,\"a\"\n"
    11.8 -            "    .align 8             \n"
    11.9 -            "    .quad 1b,2b          \n"
   11.10 -            ".previous                  "
   11.11 +            _ASM_EXTABLE(1b, 2b)
   11.12              : : "r" (base&0xffff) );
   11.13          break;
   11.14  
    12.1 --- a/xen/arch/x86/xen.lds.S	Fri Dec 24 08:46:46 2010 +0000
    12.2 +++ b/xen/arch/x86/xen.lds.S	Fri Dec 24 08:47:23 2010 +0000
    12.3 @@ -38,18 +38,19 @@ SECTIONS
    12.4         *(.rodata.*)
    12.5    } :text
    12.6  
    12.7 -  . = ALIGN(32);               /* Exception table */
    12.8 -  __ex_table : {
    12.9 +  . = ALIGN(SMP_CACHE_BYTES);
   12.10 +  .data.read_mostly : {
   12.11 +       /* Exception table */
   12.12         __start___ex_table = .;
   12.13 -       *(__ex_table)
   12.14 +       *(.ex_table)
   12.15         __stop___ex_table = .;
   12.16 -  } :text
   12.17  
   12.18 -  . = ALIGN(32);               /* Pre-exception table */
   12.19 -  __pre_ex_table : {
   12.20 +       /* Pre-exception table */
   12.21         __start___pre_ex_table = .;
   12.22 -       *(__pre_ex_table)
   12.23 +       *(.ex_table.pre)
   12.24         __stop___pre_ex_table = .;
   12.25 +
   12.26 +       *(.data.read_mostly)
   12.27    } :text
   12.28  
   12.29    .data : {                    /* Data */
   12.30 @@ -59,11 +60,6 @@ SECTIONS
   12.31         CONSTRUCTORS
   12.32    } :text
   12.33  
   12.34 -  . = ALIGN(SMP_CACHE_BYTES);
   12.35 -  .data.read_mostly : {
   12.36 -       *(.data.read_mostly)
   12.37 -  } :text
   12.38 -
   12.39  #ifdef LOCK_PROFILE
   12.40    . = ALIGN(32);
   12.41    __lock_profile_start = .;
    13.1 --- a/xen/include/asm-x86/asm_defns.h	Fri Dec 24 08:46:46 2010 +0000
    13.2 +++ b/xen/include/asm-x86/asm_defns.h	Fri Dec 24 08:47:23 2010 +0000
    13.3 @@ -2,8 +2,10 @@
    13.4  #ifndef __X86_ASM_DEFNS_H__
    13.5  #define __X86_ASM_DEFNS_H__
    13.6  
    13.7 +#ifndef COMPILE_OFFSETS
    13.8  /* NB. Auto-generated from arch/.../asm-offsets.c */
    13.9  #include <asm/asm-offsets.h>
   13.10 +#endif
   13.11  #include <asm/processor.h>
   13.12  
   13.13  #ifdef __x86_64__
   13.14 @@ -12,6 +14,24 @@
   13.15  #include <asm/x86_32/asm_defns.h>
   13.16  #endif
   13.17  
   13.18 +/* Exception table entry */
   13.19 +#ifdef __ASSEMBLY__
   13.20 +# define _ASM__EXTABLE(sfx, from, to)             \
   13.21 +    .section .ex_table##sfx, "a" ;                \
   13.22 +    .balign 4 ;                                   \
   13.23 +    .long _ASM_EX(from), _ASM_EX(to) ;            \
   13.24 +    .previous
   13.25 +#else
   13.26 +# define _ASM__EXTABLE(sfx, from, to)             \
   13.27 +    " .section .ex_table" #sfx ",\"a\"\n"         \
   13.28 +    " .balign 4\n"                                \
   13.29 +    " .long " _ASM_EX(from) ", " _ASM_EX(to) "\n" \
   13.30 +    " .previous\n"
   13.31 +#endif
   13.32 +
   13.33 +#define _ASM_EXTABLE(from, to)     _ASM__EXTABLE(, from, to)
   13.34 +#define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
   13.35 +
   13.36  #ifdef __ASSEMBLY__
   13.37  
   13.38  #define UNLIKELY_START(cond, tag) \
    14.1 --- a/xen/include/asm-x86/config.h	Fri Dec 24 08:46:46 2010 +0000
    14.2 +++ b/xen/include/asm-x86/config.h	Fri Dec 24 08:47:23 2010 +0000
    14.3 @@ -274,8 +274,6 @@ extern unsigned int video_mode, video_fl
    14.4  /* For generic assembly code: use macros to define operation/operand sizes. */
    14.5  #define __OS          "q"  /* Operation Suffix */
    14.6  #define __OP          "r"  /* Operand Prefix */
    14.7 -#define __FIXUP_ALIGN ".align 8"
    14.8 -#define __FIXUP_WORD  ".quad"
    14.9  
   14.10  #elif defined(__i386__)
   14.11  
   14.12 @@ -351,8 +349,6 @@ extern unsigned int video_mode, video_fl
   14.13  /* For generic assembly code: use macros to define operation/operand sizes. */
   14.14  #define __OS          "l"  /* Operation Suffix */
   14.15  #define __OP          "e"  /* Operand Prefix */
   14.16 -#define __FIXUP_ALIGN ".align 4"
   14.17 -#define __FIXUP_WORD  ".long"
   14.18  
   14.19  #endif /* __i386__ */
   14.20  
    15.1 --- a/xen/include/asm-x86/hvm/vmx/vmx.h	Fri Dec 24 08:46:46 2010 +0000
    15.2 +++ b/xen/include/asm-x86/hvm/vmx/vmx.h	Fri Dec 24 08:47:23 2010 +0000
    15.3 @@ -22,6 +22,7 @@
    15.4  #include <xen/sched.h>
    15.5  #include <asm/types.h>
    15.6  #include <asm/regs.h>
    15.7 +#include <asm/asm_defns.h>
    15.8  #include <asm/processor.h>
    15.9  #include <asm/i387.h>
   15.10  #include <asm/hvm/support.h>
   15.11 @@ -341,10 +342,7 @@ static inline void __invvpid(int type, u
   15.12      asm volatile ( "1: " INVVPID_OPCODE MODRM_EAX_08
   15.13                     /* CF==1 or ZF==1 --> crash (ud2) */
   15.14                     "ja 2f ; ud2 ; 2:\n"
   15.15 -                   ".section __ex_table,\"a\"\n"
   15.16 -                   "    "__FIXUP_ALIGN"\n"
   15.17 -                   "    "__FIXUP_WORD" 1b,2b\n"
   15.18 -                   ".previous"
   15.19 +                   _ASM_EXTABLE(1b, 2b)
   15.20                     :
   15.21                     : "a" (&operand), "c" (type)
   15.22                     : "memory" );
   15.23 @@ -404,10 +402,7 @@ static inline int __vmxon(u64 addr)
   15.24          ".section .fixup,\"ax\"\n"
   15.25          "3: sub $2,%0 ; jmp 2b\n"    /* #UD or #GP --> rc = -2 */
   15.26          ".previous\n"
   15.27 -        ".section __ex_table,\"a\"\n"
   15.28 -        "   "__FIXUP_ALIGN"\n"
   15.29 -        "   "__FIXUP_WORD" 1b,3b\n"
   15.30 -        ".previous\n"
   15.31 +        _ASM_EXTABLE(1b, 3b)
   15.32          : "=q" (rc)
   15.33          : "0" (0), "a" (&addr)
   15.34          : "memory");
    16.1 --- a/xen/include/asm-x86/msr.h	Fri Dec 24 08:46:46 2010 +0000
    16.2 +++ b/xen/include/asm-x86/msr.h	Fri Dec 24 08:47:23 2010 +0000
    16.3 @@ -8,6 +8,7 @@
    16.4  #include <xen/types.h>
    16.5  #include <xen/percpu.h>
    16.6  #include <xen/errno.h>
    16.7 +#include <asm/asm_defns.h>
    16.8  
    16.9  #define rdmsr(msr,val1,val2) \
   16.10       __asm__ __volatile__("rdmsr" \
   16.11 @@ -44,10 +45,7 @@ static inline void wrmsrl(unsigned int m
   16.12          "3: xorl %0,%0\n; xorl %1,%1\n" \
   16.13          "   movl %5,%2\n; jmp 2b\n" \
   16.14          ".previous\n" \
   16.15 -        ".section __ex_table,\"a\"\n" \
   16.16 -        "   "__FIXUP_ALIGN"\n" \
   16.17 -        "   "__FIXUP_WORD" 1b,3b\n" \
   16.18 -        ".previous\n" \
   16.19 +        _ASM_EXTABLE(1b, 3b) \
   16.20          : "=a" (lo), "=d" (hi), "=&r" (_rc) \
   16.21          : "c" (msr), "2" (0), "i" (-EFAULT)); \
   16.22      val = lo | ((uint64_t)hi << 32); \
   16.23 @@ -66,10 +64,7 @@ static inline int wrmsr_safe(unsigned in
   16.24          ".section .fixup,\"ax\"\n"
   16.25          "3: movl %5,%0\n; jmp 2b\n"
   16.26          ".previous\n"
   16.27 -        ".section __ex_table,\"a\"\n"
   16.28 -        "   "__FIXUP_ALIGN"\n"
   16.29 -        "   "__FIXUP_WORD" 1b,3b\n"
   16.30 -        ".previous\n"
   16.31 +        _ASM_EXTABLE(1b, 3b)
   16.32          : "=&r" (_rc)
   16.33          : "c" (msr), "a" (lo), "d" (hi), "0" (0), "i" (-EFAULT));
   16.34      return _rc;
    17.1 --- a/xen/include/asm-x86/uaccess.h	Fri Dec 24 08:46:46 2010 +0000
    17.2 +++ b/xen/include/asm-x86/uaccess.h	Fri Dec 24 08:47:23 2010 +0000
    17.3 @@ -6,6 +6,7 @@
    17.4  #include <xen/compiler.h>
    17.5  #include <xen/errno.h>
    17.6  #include <xen/prefetch.h>
    17.7 +#include <asm/asm_defns.h>
    17.8  #include <asm/page.h>
    17.9  
   17.10  #ifdef __x86_64__
   17.11 @@ -155,10 +156,7 @@ struct __large_struct { unsigned long bu
   17.12  		"3:	mov %3,%0\n"					\
   17.13  		"	jmp 2b\n"					\
   17.14  		".previous\n"						\
   17.15 -		".section __ex_table,\"a\"\n"				\
   17.16 -		"	"__FIXUP_ALIGN"\n"				\
   17.17 -		"	"__FIXUP_WORD" 1b,3b\n"				\
   17.18 -		".previous"						\
   17.19 +		_ASM_EXTABLE(1b, 3b)					\
   17.20  		: "=r"(err)						\
   17.21  		: ltype (x), "m"(__m(addr)), "i"(errret), "0"(err))
   17.22  
   17.23 @@ -171,10 +169,7 @@ struct __large_struct { unsigned long bu
   17.24  		"	xor"itype" %"rtype"1,%"rtype"1\n"		\
   17.25  		"	jmp 2b\n"					\
   17.26  		".previous\n"						\
   17.27 -		".section __ex_table,\"a\"\n"				\
   17.28 -		"	"__FIXUP_ALIGN"\n"				\
   17.29 -		"	"__FIXUP_WORD" 1b,3b\n"				\
   17.30 -		".previous"						\
   17.31 +		_ASM_EXTABLE(1b, 3b)					\
   17.32  		: "=r"(err), ltype (x)					\
   17.33  		: "m"(__m(addr)), "i"(errret), "0"(err))
   17.34  
   17.35 @@ -272,7 +267,7 @@ static always_inline unsigned long
   17.36  
   17.37  struct exception_table_entry
   17.38  {
   17.39 -	unsigned long insn, fixup;
   17.40 +	s32 addr, cont;
   17.41  };
   17.42  
   17.43  extern unsigned long search_exception_table(unsigned long);
    18.1 --- a/xen/include/asm-x86/x86_32/asm_defns.h	Fri Dec 24 08:46:46 2010 +0000
    18.2 +++ b/xen/include/asm-x86/x86_32/asm_defns.h	Fri Dec 24 08:47:23 2010 +0000
    18.3 @@ -153,4 +153,10 @@ STR(IRQ) #nr "_interrupt:\n\t"          
    18.4          GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg)     \
    18.5          movl (reg),reg;
    18.6  
    18.7 +#ifdef __ASSEMBLY__
    18.8 +# define _ASM_EX(p) p
    18.9 +#else
   18.10 +# define _ASM_EX(p) #p
   18.11 +#endif
   18.12 +
   18.13  #endif /* __X86_32_ASM_DEFNS_H__ */
    19.1 --- a/xen/include/asm-x86/x86_32/system.h	Fri Dec 24 08:46:46 2010 +0000
    19.2 +++ b/xen/include/asm-x86/x86_32/system.h	Fri Dec 24 08:47:23 2010 +0000
    19.3 @@ -49,10 +49,7 @@ static always_inline unsigned long long 
    19.4          "3:     movl $1,%1\n"                                           \
    19.5          "       jmp 2b\n"                                               \
    19.6          ".previous\n"                                                   \
    19.7 -        ".section __ex_table,\"a\"\n"                                   \
    19.8 -        "       .align 4\n"                                             \
    19.9 -        "       .long 1b,3b\n"                                          \
   19.10 -        ".previous"                                                     \
   19.11 +        _ASM_EXTABLE(1b, 3b)                                            \
   19.12          : "=a" (_o), "=r" (_rc)                                         \
   19.13          : _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
   19.14          : "memory");
   19.15 @@ -78,10 +75,7 @@ static always_inline unsigned long long 
   19.16              "3:     movl $1,%1\n"                                       \
   19.17              "       jmp 2b\n"                                           \
   19.18              ".previous\n"                                               \
   19.19 -            ".section __ex_table,\"a\"\n"                               \
   19.20 -            "       .align 4\n"                                         \
   19.21 -            "       .long 1b,3b\n"                                      \
   19.22 -            ".previous"                                                 \
   19.23 +            _ASM_EXTABLE(1b, 3b)                                        \
   19.24              : "=A" (_o), "=r" (_rc)                                     \
   19.25              : "c" ((u32)((u64)(_n)>>32)), "b" ((u32)(_n)),              \
   19.26                "m" (*__xg((volatile void *)(_p))), "0" (_o), "1" (0)     \
    20.1 --- a/xen/include/asm-x86/x86_32/uaccess.h	Fri Dec 24 08:46:46 2010 +0000
    20.2 +++ b/xen/include/asm-x86/x86_32/uaccess.h	Fri Dec 24 08:47:23 2010 +0000
    20.3 @@ -33,11 +33,8 @@ extern void __uaccess_var_not_u64(void);
    20.4  		"4:	movl %3,%0\n"				\
    20.5  		"	jmp 3b\n"				\
    20.6  		".previous\n"					\
    20.7 -		".section __ex_table,\"a\"\n"			\
    20.8 -		"	.align 4\n"				\
    20.9 -		"	.long 1b,4b\n"				\
   20.10 -		"	.long 2b,4b\n"				\
   20.11 -		".previous"					\
   20.12 +		_ASM_EXTABLE(1b, 4b)				\
   20.13 +		_ASM_EXTABLE(2b, 4b)				\
   20.14  		: "=r"(retval)					\
   20.15  		: "A" (x), "r" (addr), "i"(errret), "0"(retval))
   20.16  
   20.17 @@ -65,11 +62,8 @@ do {									\
   20.18  		"	xorl %%edx,%%edx\n"			\
   20.19  		"	jmp 3b\n"				\
   20.20  		".previous\n"					\
   20.21 -		".section __ex_table,\"a\"\n"			\
   20.22 -		"	.align 4\n"				\
   20.23 -		"	.long 1b,4b\n"				\
   20.24 -		"	.long 2b,4b\n"				\
   20.25 -		".previous"					\
   20.26 +		_ASM_EXTABLE(1b, 4b)				\
   20.27 +		_ASM_EXTABLE(2b, 4b)				\
   20.28  		: "=r" (retval), "=&A" (x)			\
   20.29  		: "r" (addr), "i"(errret), "0"(retval))
   20.30  
    21.1 --- a/xen/include/asm-x86/x86_64/asm_defns.h	Fri Dec 24 08:46:46 2010 +0000
    21.2 +++ b/xen/include/asm-x86/x86_64/asm_defns.h	Fri Dec 24 08:47:23 2010 +0000
    21.3 @@ -130,4 +130,10 @@ STR(IRQ) #nr "_interrupt:\n\t"          
    21.4          GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg)     \
    21.5          movq (reg),reg;
    21.6  
    21.7 +#ifdef __ASSEMBLY__
    21.8 +# define _ASM_EX(p) p-.
    21.9 +#else
   21.10 +# define _ASM_EX(p) #p "-."
   21.11 +#endif
   21.12 +
   21.13  #endif /* __X86_64_ASM_DEFNS_H__ */
    22.1 --- a/xen/include/asm-x86/x86_64/system.h	Fri Dec 24 08:46:46 2010 +0000
    22.2 +++ b/xen/include/asm-x86/x86_64/system.h	Fri Dec 24 08:47:23 2010 +0000
    22.3 @@ -19,10 +19,7 @@
    22.4          "3:     movl $1,%1\n"                                           \
    22.5          "       jmp 2b\n"                                               \
    22.6          ".previous\n"                                                   \
    22.7 -        ".section __ex_table,\"a\"\n"                                   \
    22.8 -        "       .align 8\n"                                             \
    22.9 -        "       .quad 1b,3b\n"                                          \
   22.10 -        ".previous"                                                     \
   22.11 +        _ASM_EXTABLE(1b, 3b)                                            \
   22.12          : "=a" (_o), "=r" (_rc)                                         \
   22.13          : _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
   22.14          : "memory");