/root/src/xen/xen/include/asm/amd.h
Line | Count | Source (jump to first uncovered line) |
1 | | /* |
2 | | * amd.h - AMD processor specific definitions |
3 | | */ |
4 | | |
5 | | #ifndef __AMD_H__ |
6 | | #define __AMD_H__ |
7 | | |
8 | | #include <asm/cpufeature.h> |
9 | | |
/* CPUID masks for use by AMD-V Extended Migration */

/* Family 0Fh, Revision C */
#define AMD_FEATURES_K8_REV_C_ECX 0
#define AMD_FEATURES_K8_REV_C_EDX (                                       \
    cpufeat_mask(X86_FEATURE_FPU)     | cpufeat_mask(X86_FEATURE_VME)   | \
    cpufeat_mask(X86_FEATURE_DE)      | cpufeat_mask(X86_FEATURE_PSE)   | \
    cpufeat_mask(X86_FEATURE_TSC)     | cpufeat_mask(X86_FEATURE_MSR)   | \
    cpufeat_mask(X86_FEATURE_PAE)     | cpufeat_mask(X86_FEATURE_MCE)   | \
    cpufeat_mask(X86_FEATURE_CX8)     | cpufeat_mask(X86_FEATURE_APIC)  | \
    cpufeat_mask(X86_FEATURE_SEP)     | cpufeat_mask(X86_FEATURE_MTRR)  | \
    cpufeat_mask(X86_FEATURE_PGE)     | cpufeat_mask(X86_FEATURE_MCA)   | \
    cpufeat_mask(X86_FEATURE_CMOV)    | cpufeat_mask(X86_FEATURE_PAT)   | \
    cpufeat_mask(X86_FEATURE_PSE36)   | cpufeat_mask(X86_FEATURE_CLFLUSH) | \
    cpufeat_mask(X86_FEATURE_MMX)     | cpufeat_mask(X86_FEATURE_FXSR)  | \
    cpufeat_mask(X86_FEATURE_SSE)     | cpufeat_mask(X86_FEATURE_SSE2))
#define AMD_EXTFEATURES_K8_REV_C_ECX 0
#define AMD_EXTFEATURES_K8_REV_C_EDX (                                    \
    cpufeat_mask(X86_FEATURE_FPU)     | cpufeat_mask(X86_FEATURE_VME)   | \
    cpufeat_mask(X86_FEATURE_DE)      | cpufeat_mask(X86_FEATURE_PSE)   | \
    cpufeat_mask(X86_FEATURE_TSC)     | cpufeat_mask(X86_FEATURE_MSR)   | \
    cpufeat_mask(X86_FEATURE_PAE)     | cpufeat_mask(X86_FEATURE_MCE)   | \
    cpufeat_mask(X86_FEATURE_CX8)     | cpufeat_mask(X86_FEATURE_APIC)  | \
    cpufeat_mask(X86_FEATURE_SYSCALL) | cpufeat_mask(X86_FEATURE_MTRR)  | \
    cpufeat_mask(X86_FEATURE_PGE)     | cpufeat_mask(X86_FEATURE_MCA)   | \
    cpufeat_mask(X86_FEATURE_CMOV)    | cpufeat_mask(X86_FEATURE_PAT)   | \
    cpufeat_mask(X86_FEATURE_PSE36)   | cpufeat_mask(X86_FEATURE_NX)    | \
    cpufeat_mask(X86_FEATURE_MMXEXT)  | cpufeat_mask(X86_FEATURE_MMX)   | \
    cpufeat_mask(X86_FEATURE_FXSR)    | cpufeat_mask(X86_FEATURE_LM)    | \
    cpufeat_mask(X86_FEATURE_3DNOWEXT) | cpufeat_mask(X86_FEATURE_3DNOW))
40 | | |
/* Family 0Fh, Revision D: Rev C plus LAHF_LM (ext ECX) and FFXSR (ext EDX) */
#define AMD_FEATURES_K8_REV_D_ECX    AMD_FEATURES_K8_REV_C_ECX
#define AMD_FEATURES_K8_REV_D_EDX    AMD_FEATURES_K8_REV_C_EDX
#define AMD_EXTFEATURES_K8_REV_D_ECX (AMD_EXTFEATURES_K8_REV_C_ECX |    \
    cpufeat_mask(X86_FEATURE_LAHF_LM))
#define AMD_EXTFEATURES_K8_REV_D_EDX (AMD_EXTFEATURES_K8_REV_C_EDX |    \
    cpufeat_mask(X86_FEATURE_FFXSR))
48 | | |
/* Family 0Fh, Revision E: Rev D plus SSE3, HTT and CMP_LEGACY */
#define AMD_FEATURES_K8_REV_E_ECX    (AMD_FEATURES_K8_REV_D_ECX |       \
    cpufeat_mask(X86_FEATURE_SSE3))
#define AMD_FEATURES_K8_REV_E_EDX    (AMD_FEATURES_K8_REV_D_EDX |       \
    cpufeat_mask(X86_FEATURE_HTT))
#define AMD_EXTFEATURES_K8_REV_E_ECX (AMD_EXTFEATURES_K8_REV_D_ECX |    \
    cpufeat_mask(X86_FEATURE_CMP_LEGACY))
#define AMD_EXTFEATURES_K8_REV_E_EDX AMD_EXTFEATURES_K8_REV_D_EDX
57 | | |
/* Family 0Fh, Revision F: Rev E plus CX16, SVM, EXTAPIC, CR8_LEGACY, RDTSCP */
#define AMD_FEATURES_K8_REV_F_ECX    (AMD_FEATURES_K8_REV_E_ECX |       \
    cpufeat_mask(X86_FEATURE_CX16))
#define AMD_FEATURES_K8_REV_F_EDX    AMD_FEATURES_K8_REV_E_EDX
#define AMD_EXTFEATURES_K8_REV_F_ECX (AMD_EXTFEATURES_K8_REV_E_ECX |    \
    cpufeat_mask(X86_FEATURE_SVM)     |                                 \
    cpufeat_mask(X86_FEATURE_EXTAPIC) |                                 \
    cpufeat_mask(X86_FEATURE_CR8_LEGACY))
#define AMD_EXTFEATURES_K8_REV_F_EDX (AMD_EXTFEATURES_K8_REV_E_EDX |    \
    cpufeat_mask(X86_FEATURE_RDTSCP))
67 | | |
/* Family 0Fh, Revision G: Rev F plus 3DNow! prefetch (ext ECX) */
#define AMD_FEATURES_K8_REV_G_ECX    AMD_FEATURES_K8_REV_F_ECX
#define AMD_FEATURES_K8_REV_G_EDX    AMD_FEATURES_K8_REV_F_EDX
#define AMD_EXTFEATURES_K8_REV_G_ECX (AMD_EXTFEATURES_K8_REV_F_ECX |    \
    cpufeat_mask(X86_FEATURE_3DNOWPREFETCH))
#define AMD_EXTFEATURES_K8_REV_G_EDX AMD_EXTFEATURES_K8_REV_F_EDX
74 | | |
/*
 * Family 10h, Revision B: built on K8 Rev F, adding POPCNT/MONITOR plus
 * ABM, SSE4A, MISALIGNSSE, OSVW, IBS (ext ECX) and 1GB pages (ext EDX).
 */
#define AMD_FEATURES_FAM10h_REV_B_ECX    (AMD_FEATURES_K8_REV_F_ECX |   \
    cpufeat_mask(X86_FEATURE_POPCNT) | cpufeat_mask(X86_FEATURE_MONITOR))
#define AMD_FEATURES_FAM10h_REV_B_EDX    AMD_FEATURES_K8_REV_F_EDX
#define AMD_EXTFEATURES_FAM10h_REV_B_ECX (AMD_EXTFEATURES_K8_REV_F_ECX | \
    cpufeat_mask(X86_FEATURE_ABM)         |                             \
    cpufeat_mask(X86_FEATURE_SSE4A)       |                             \
    cpufeat_mask(X86_FEATURE_MISALIGNSSE) |                             \
    cpufeat_mask(X86_FEATURE_OSVW)        |                             \
    cpufeat_mask(X86_FEATURE_IBS))
#define AMD_EXTFEATURES_FAM10h_REV_B_EDX (AMD_EXTFEATURES_K8_REV_F_EDX | \
    cpufeat_mask(X86_FEATURE_PAGE1GB))
85 | | |
/* Family 10h, Revision C: Rev B plus SKINIT and WDT (ext ECX) */
#define AMD_FEATURES_FAM10h_REV_C_ECX    AMD_FEATURES_FAM10h_REV_B_ECX
#define AMD_FEATURES_FAM10h_REV_C_EDX    AMD_FEATURES_FAM10h_REV_B_EDX
#define AMD_EXTFEATURES_FAM10h_REV_C_ECX (AMD_EXTFEATURES_FAM10h_REV_B_ECX | \
    cpufeat_mask(X86_FEATURE_SKINIT) | cpufeat_mask(X86_FEATURE_WDT))
#define AMD_EXTFEATURES_FAM10h_REV_C_EDX AMD_EXTFEATURES_FAM10h_REV_B_EDX
92 | | |
/* Family 11h, Revision B: built on K8 Rev G, adding SKINIT (ext ECX) */
#define AMD_FEATURES_FAM11h_REV_B_ECX    AMD_FEATURES_K8_REV_G_ECX
#define AMD_FEATURES_FAM11h_REV_B_EDX    AMD_FEATURES_K8_REV_G_EDX
#define AMD_EXTFEATURES_FAM11h_REV_B_ECX (AMD_EXTFEATURES_K8_REV_G_ECX | \
    cpufeat_mask(X86_FEATURE_SKINIT))
#define AMD_EXTFEATURES_FAM11h_REV_B_EDX AMD_EXTFEATURES_K8_REV_G_EDX
99 | | |
/* AMD errata checking
 *
 * Errata are defined using the AMD_LEGACY_ERRATUM() or AMD_OSVW_ERRATUM()
 * macros. The latter is intended for newer errata that have an OSVW id
 * assigned, which it takes as first argument. Both take a variable number
 * of family-specific model-stepping ranges created by AMD_MODEL_RANGE().
 *
 * Example 1:
 * #define AMD_ERRATUM_319                                              \
 * AMD_LEGACY_ERRATUM(AMD_MODEL_RANGE(0x10, 0x2, 0x1, 0x4, 0x2),        \
 *            AMD_MODEL_RANGE(0x10, 0x8, 0x0, 0x8, 0x0),                \
 *            AMD_MODEL_RANGE(0x10, 0x9, 0x0, 0x9, 0x0))
 * Example 2:
 * #define AMD_ERRATUM_400                                              \
 * AMD_OSVW_ERRATUM(1, AMD_MODEL_RANGE(0xf, 0x41, 0x2, 0xff, 0xf),      \
 *            AMD_MODEL_RANGE(0x10, 0x2, 0x1, 0xff, 0xf))
 *
 */

/* Expand to the argument list consumed by cpu_has_amd_erratum():
 * OSVW id (-1 = legacy lookup by model range), the range list, 0 terminator. */
#define AMD_LEGACY_ERRATUM(...) -1 /* legacy */, __VA_ARGS__, 0
#define AMD_OSVW_ERRATUM(osvw_id, ...) osvw_id, __VA_ARGS__, 0

/*
 * Pack family, start model/stepping and end model/stepping into one word:
 * range = f[31:24] | m_start[23:16] | s_start[15:12] | m_end[11:4] | s_end[3:0]
 *
 * Each parameter is parenthesized so arguments containing lower-precedence
 * operators (e.g. `0x10 | 0x2`) expand correctly (CERT PRE01-C); the
 * original expansion applied `<<` and `|` to the raw argument text.
 */
#define AMD_MODEL_RANGE(f, m_start, s_start, m_end, s_end)              \
    (((f) << 24) | ((m_start) << 16) | ((s_start) << 12) |              \
     ((m_end) << 4) | (s_end))
/* Accessors for the packed range word above. */
#define AMD_MODEL_RANGE_FAMILY(range) (((range) >> 24) & 0xff)
#define AMD_MODEL_RANGE_START(range)  (((range) >> 12) & 0xfff)
#define AMD_MODEL_RANGE_END(range)    ((range) & 0xfff)
126 | | |
/* Erratum #121: family 0Fh, models 0x00-0x3f, all steppings */
#define AMD_ERRATUM_121                                                 \
    AMD_LEGACY_ERRATUM(AMD_MODEL_RANGE(0x0f, 0x0, 0x0, 0x3f, 0xf))

/* Erratum #170: family 0Fh, models 0x00-0x67, all steppings */
#define AMD_ERRATUM_170                                                 \
    AMD_LEGACY_ERRATUM(AMD_MODEL_RANGE(0x0f, 0x0, 0x0, 0x67, 0xf))

/* Erratum #383 (OSVW id 3): family 10h from model 2/1 up, and early 12h */
#define AMD_ERRATUM_383                                                 \
    AMD_OSVW_ERRATUM(3, AMD_MODEL_RANGE(0x10, 0x2, 0x1, 0xff, 0xf),     \
                     AMD_MODEL_RANGE(0x12, 0x0, 0x0, 0x1, 0x0))

/* Erratum #573: all models/steppings of families 0Fh, 10h, 11h and 12h */
#define AMD_ERRATUM_573                                                 \
    AMD_LEGACY_ERRATUM(AMD_MODEL_RANGE(0x0f, 0x0, 0x0, 0xff, 0xf),      \
                       AMD_MODEL_RANGE(0x10, 0x0, 0x0, 0xff, 0xf),      \
                       AMD_MODEL_RANGE(0x11, 0x0, 0x0, 0xff, 0xf),      \
                       AMD_MODEL_RANGE(0x12, 0x0, 0x0, 0xff, 0xf))
142 | | |
143 | | struct cpuinfo_x86; |
144 | | int cpu_has_amd_erratum(const struct cpuinfo_x86 *, int, ...); |
145 | | |
146 | | extern s8 opt_allow_unsafe; |
147 | | |
148 | | void fam10h_check_enable_mmcfg(void); |
149 | | void check_enable_amd_mmconf_dmi(void); |
150 | | |
151 | | #endif /* __AMD_H__ */ |