xref: /linux/arch/x86/include/asm/special_insns.h (revision ab520be8cd5d56867fc95cfbc34b90880faf1f9d)
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance.  The solution is
 * to use a variable and mimic reads and writes to it to enforce
 * serialization.
 */
extern unsigned long __force_order;

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}
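
/*
 * Illustrative sketch, not part of the original header: control registers
 * are typically updated with a read-modify-write built on the accessors
 * above.  X86_CR0_WP comes from <asm/processor-flags.h>; the helper name
 * below is made up for this example.
 */
static inline void example_toggle_cr0_wp(void)
{
	unsigned long cr0 = native_read_cr0();

	/* Flip the Write Protect bit and write the register back. */
	native_write_cr0(cr0 ^ X86_CR0_WP);
}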

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}
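
/*
 * Illustrative sketch, not part of the original header: reloading CR3 with
 * its current value is the classic way to flush all non-global TLB entries.
 * The helper name is made up for this example.
 */
static inline void example_flush_tlb_by_cr3_reload(void)
{
	/* Writing CR3 back unchanged invalidates non-global TLB entries. */
	native_write_cr3(native_read_cr3());
}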

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4
	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}
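
/*
 * Illustrative sketch, not part of the original header: CR4 feature bits
 * (the X86_CR4_* constants from <asm/processor-flags.h>) are enabled with
 * the same read-modify-write pattern; the fault-tolerant read above keeps
 * this safe even on 32-bit CPUs without CR4.  The helper name is made up
 * for this example.
 */
static inline void example_enable_cr4_bit(unsigned long mask)
{
	/* OR the requested bit(s) into the current CR4 value. */
	native_write_cr4(native_read_cr4() | mask);
}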

#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads the contents of EAX into PKRU and
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}
#else
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif
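
/*
 * Illustrative sketch, not part of the original header: PKRU holds two bits
 * per protection key, Access-Disable at bit 2*key and Write-Disable at bit
 * 2*key + 1.  The hypothetical helper below only shows how a key's
 * permissions can be decoded from the value returned by __read_pkru().
 */
static inline int example_pkey_is_writable(int pkey)
{
	u32 pkru = __read_pkru();

	/* Writable only if neither the AD nor the WD bit is set for this key. */
	return !(pkru & (3u << (2 * pkey)));
}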

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void native_load_gs_index(unsigned);

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

static inline unsigned long read_cr3(void)
{
	return native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT */

static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",  /* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}
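
/*
 * Illustrative sketch, not part of the original header: flushing a whole
 * buffer means issuing clwb() once per cache line.  The 64-byte stride is
 * hard-coded for brevity; real callers use boot_cpu_data.x86_clflush_size.
 * The helper name is made up for this example.
 */
static inline void example_clwb_range(void *addr, unsigned long size)
{
	unsigned long p = (unsigned long)addr & ~63UL;	/* align down to a line */
	unsigned long end = (unsigned long)addr + size;

	for (; p < end; p += 64)
		clwb((void *)p);
}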

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */