
xen/arch/x86/acpi/wakeup_prot.S @ 19808:64a932c92a7c

x86: fix s3 resume on AMD CPUs

Avoid longjmp, as its semantics in long mode differ from those on Intel
CPUs. Also add a few comments and remove a pointless reload of DS.

Signed-off-by: Christoph Egger <Christoph.Egger@amd.com>
Signed-off-by: Keir Fraser <keir.fraser@eu.citrix.com>
Author:   Keir Fraser <keir.fraser@citrix.com>
Date:     Tue Jun 16 14:19:34 2009 +0100
Parents:  24379dde8ac4
Children: 3ffdb094c2c0
        .text

#include <xen/config.h>
#include <xen/multiboot.h>
#include <public/xen.h>
#include <asm/asm_defns.h>
#include <asm/desc.h>
#include <asm/page.h>
#include <asm/msr.h>

#if defined(__x86_64__)

        .code64

#define GREG(x)         %r##x
#define SAVED_GREG(x)   saved_r##x(%rip)
#define DECLARE_GREG(x) saved_r##x: .quad 0
#define SAVE_GREG(x)    movq GREG(x), SAVED_GREG(x)
#define LOAD_GREG(x)    movq SAVED_GREG(x), GREG(x)

#define REF(x)          x(%rip)
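
/*
 * RDMSR(ind, m): read MSR 'ind' (rdmsr returns the value split across
 * %edx:%eax) and store the combined 64-bit result at symbol 'm'.
 * WRMSR(ind, m) is the inverse: split the quadword at 'm' back into
 * %edx:%eax and write it to MSR 'ind'.
 */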
#define RDMSR(ind, m) \
        xorq    %rdx, %rdx; \
        mov     $ind, %ecx; \
        rdmsr; \
        shlq    $0x20, %rdx; \
        orq     %rax, %rdx; \
        movq    %rdx, m(%rip);

#define WRMSR(ind, m) \
        mov     $ind, %ecx; \
        movq    m(%rip), %rdx; \
        mov     %edx, %eax; \
        shrq    $0x20, %rdx; \
        wrmsr;

#else /* !defined(__x86_64__) */

        .code32

#define GREG(x)         %e##x
#define SAVED_GREG(x)   saved_e##x
#define DECLARE_GREG(x) saved_e##x: .long 0
#define SAVE_GREG(x)    movl GREG(x), SAVED_GREG(x)
#define LOAD_GREG(x)    movl SAVED_GREG(x), GREG(x)

#define REF(x)          x

#endif
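
/*
 * do_suspend_lowlevel: save the CPU context to memory, enter the ACPI
 * sleep state via acpi_enter_sleep_state(3), and restore that context
 * when execution resumes at __ret_point.
 */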
ENTRY(do_suspend_lowlevel)

        SAVE_GREG(sp)
        SAVE_GREG(ax)
        SAVE_GREG(bx)
        SAVE_GREG(cx)
        SAVE_GREG(dx)
        SAVE_GREG(bp)
        SAVE_GREG(si)
        SAVE_GREG(di)

#if defined(__x86_64__)

        SAVE_GREG(8)    # save r8...r15
        SAVE_GREG(9)
        SAVE_GREG(10)
        SAVE_GREG(11)
        SAVE_GREG(12)
        SAVE_GREG(13)
        SAVE_GREG(14)
        SAVE_GREG(15)
        pushfq;
        popq    SAVED_GREG(flags)
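        # (rflags has no direct mov form, hence the pushfq/popq pair)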

        mov     %cr8, GREG(ax)
        mov     GREG(ax), REF(saved_cr8)

        RDMSR(MSR_FS_BASE, saved_fs_base)
        RDMSR(MSR_GS_BASE, saved_gs_base)
        RDMSR(MSR_SHADOW_GS_BASE, saved_kernel_gs_base)

#else /* !defined(__x86_64__) */

        pushfl;
        popl    SAVED_GREG(flags)

#endif

        mov     %ds, REF(saved_ds)
        mov     %es, REF(saved_es)
        mov     %fs, REF(saved_fs)
        mov     %gs, REF(saved_gs)
        mov     %ss, REF(saved_ss)
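
        /* Save the descriptor-table registers so their bases and
         * limits can be restored verbatim on resume. */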
        sgdt    REF(saved_gdt)
        sidt    REF(saved_idt)
        sldt    REF(saved_ldt)

        mov     %cr0, GREG(ax)
        mov     GREG(ax), REF(saved_cr0)

        mov     %cr3, GREG(ax)
        mov     GREG(ax), REF(saved_cr3)

        call    save_rest_processor_state
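
        /* Pass the sleep state (3 = S3) to acpi_enter_sleep_state():
         * in %rdi per the x86-64 calling convention, on the stack for
         * the 32-bit build. */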
#if defined(__x86_64__)

        mov     $3, %rdi
        xor     %eax, %eax

#else /* !defined(__x86_64__) */

        push    $3

#endif

        /* enter sleep state physically */
        call    acpi_enter_sleep_state
        jmp     __ret_point
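
        /* If entering the sleep state fails, acpi_enter_sleep_state()
         * returns and we fall through to __ret_point; on a successful
         * suspend, the wakeup code re-enters at __ret_point instead. */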

        .align 16
        .globl __ret_point
__ret_point:

        /* mmu_cr4_features contains latest cr4 setting */
        mov     REF(mmu_cr4_features), GREG(ax)
        mov     GREG(ax), %cr4

        mov     REF(saved_cr3), GREG(ax)
        mov     GREG(ax), %cr3

        mov     REF(saved_cr0), GREG(ax)
        mov     GREG(ax), %cr0

        lgdt    REF(saved_gdt)
        lidt    REF(saved_idt)
        lldt    REF(saved_ldt)

        mov     REF(saved_ss), %ss
        LOAD_GREG(sp)
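        # the pre-suspend stack is live again from this point on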

#if defined(__x86_64__)

        /* Reload code selector */
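        /* A push/lretq pair is used rather than a direct far jump; per
         * the changeset description, the "longjmp" semantics in long
         * mode differ between AMD and Intel CPUs. */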
        pushq   $(__HYPERVISOR_CS64)
        leaq    1f(%rip), %rax
        pushq   %rax
        lretq
1:

        mov     REF(saved_cr8), %rax
        mov     %rax, %cr8

        pushq   SAVED_GREG(flags)
        popfq

        /* The idle vcpu needs no segment selector reload: the saved
         * selectors may be stale values from another domain, and
         * reloading them could fault if no matching GDT entry exists.
         */
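        /* Fetch the current vcpu from the struct cpu_info at the top
         * of this CPU's stack, then test its domain's id (0x7fff
         * appears to denote the idle domain) to decide whether the
         * reload below can be skipped. */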
        mov     $(STACK_SIZE - 8), %rax
        or      %rsp, %rax
        and     $~7, %rax
        mov     (%rax), %rax
        mov     0x10(%rax), %rax
        cmpw    $0x7fff, (%rax)
        je      1f

        /* These selectors came from a guest, and thus need reloading */
        mov     REF(saved_ds), %ds
        mov     REF(saved_es), %es
        mov     REF(saved_fs), %fs

        /* gs load is special */
        mov     REF(saved_gs), %rsi
        mov     $3, %rdi        # SEGBASE_GS_USER_SEL
        call    do_set_segment_base

1:
        # MSR restore
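        # (loading %fs/%gs above clobbers their base MSRs, so the bases
        #  must be written back afterwards)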
        WRMSR(MSR_FS_BASE, saved_fs_base)
        WRMSR(MSR_GS_BASE, saved_gs_base)
        WRMSR(MSR_SHADOW_GS_BASE, saved_kernel_gs_base)

#else /* !defined(__x86_64__) */

        pushl   SAVED_GREG(flags)
        popfl

        /* No reload of fs/gs, which were already saved at the bottom
         * of the stack */
        mov     REF(saved_ds), %ds
        mov     REF(saved_es), %es

#endif

        call    restore_rest_processor_state

        LOAD_GREG(bp)
        LOAD_GREG(ax)
        LOAD_GREG(bx)
        LOAD_GREG(cx)
        LOAD_GREG(dx)
        LOAD_GREG(si)
        LOAD_GREG(di)
#if defined(__x86_64__)
        LOAD_GREG(8)    # restore r8...r15
        LOAD_GREG(9)
        LOAD_GREG(10)
        LOAD_GREG(11)
        LOAD_GREG(12)
        LOAD_GREG(13)
        LOAD_GREG(14)
        LOAD_GREG(15)
#endif
        ret

        .data
        .align 16
saved_ds:       .word   0
saved_es:       .word   0
saved_ss:       .word   0
saved_gs:       .word   0
saved_fs:       .word   0

        .align 4
        .globl saved_magic
saved_magic:    .long   0x9abcdef0
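        /* saved_magic is presumably checked by the wakeup code on
         * resume to verify the saved image before jumping back here. */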

        .align 8
DECLARE_GREG(sp)
DECLARE_GREG(bp)
DECLARE_GREG(ax)
DECLARE_GREG(bx)
DECLARE_GREG(cx)
DECLARE_GREG(dx)
DECLARE_GREG(si)
DECLARE_GREG(di)
DECLARE_GREG(flags)

#if defined(__x86_64__)

DECLARE_GREG(8)
DECLARE_GREG(9)
DECLARE_GREG(10)
DECLARE_GREG(11)
DECLARE_GREG(12)
DECLARE_GREG(13)
DECLARE_GREG(14)
DECLARE_GREG(15)

saved_gdt:      .quad   0, 0
saved_idt:      .quad   0, 0
saved_ldt:      .quad   0, 0

saved_cr0:      .quad   0
saved_cr3:      .quad   0
saved_cr8:      .quad   0

saved_gs_base:          .quad   0
saved_fs_base:          .quad   0
saved_kernel_gs_base:   .quad   0

#else /* !defined(__x86_64__) */

saved_gdt:      .long   0, 0
saved_idt:      .long   0, 0
saved_ldt:      .long   0

saved_cr0:      .long   0
saved_cr3:      .long   0

#endif