/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>
#include <asm/processor-flags.h>
#include <linux/jump_label.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a dummy variable and mimic reads and writes to it to enforce
 * serialization.
 */
extern unsigned long __force_order;

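/*
 * native_write_cr0() lives out of line (see arch/x86/kernel/cpu/common.c)
 * so that security-sensitive bits such as CR0.WP can be pinned on write.
 */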
void native_write_cr0(unsigned long val);

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

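/* CR2 holds the faulting linear address after a page fault (#PF). */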
static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

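/*
 * CR3 holds more than the page-table base address (PCID/attribute bits),
 * hence the __ prefix: most callers want the read_cr3_pa() wrapper instead.
 */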
static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist. Non-existent CR4
	 * is functionally equivalent to CR4 == 0. Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

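/* Also out of line (arch/x86/kernel/cpu/common.c) so CR4 bits can be pinned. */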
void native_write_cr4(unsigned long val);

#ifdef CONFIG_X86_64
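/* CR8 mirrors the local APIC Task Priority Register (TPR). */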
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
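/*
 * PKRU is the user-mode protection-keys rights register: two bits per key
 * (access-disable and write-disable) for each of the 16 protection keys.
 */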
static inline u32 rdpkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction. Places PKRU contents into EAX,
	 * clears EDX and requires that ecx=0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void wrpkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction. Loads the contents of EAX into PKRU;
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}

static inline void __write_pkru(u32 pkru)
{
	/*
	 * WRPKRU is relatively expensive compared to RDPKRU.
	 * Avoid WRPKRU when it would not change the value.
	 */
	if (pkru == rdpkru())
		return;

	wrpkru(pkru);
}

#else
static inline u32 rdpkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif

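/* Write back and invalidate all cache lines; serializing and very slow. */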
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

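/* Implemented in assembly (entry_64.S); safely reloads the GS selector. */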
extern asmlinkage void native_load_gs_index(unsigned);

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

#ifdef CONFIG_PARAVIRT_XXL
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

/*
 * Careful! CR3 contains more than just an address. You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT_XXL */

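/*
 * Cache-line flush/write-back primitives. These are only weakly ordered;
 * use a memory fence when ordering against other stores matters.
 */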
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

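/*
 * clflushopt is a 0x66-prefixed clflush; the ds (NOP_DS_PREFIX) prefix on
 * the fallback keeps both encodings the same length for runtime patching.
 */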
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

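/*
 * clwb writes the cache line back without invalidating it. Patched at
 * boot to fall back to clflushopt, then plain clflush, on older CPUs.
 */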
static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",  /* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}

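/*
 * Illustrative sketch (not part of this file): persistent-memory code
 * typically writes back each line of a buffer and then fences, e.g.,
 * assuming buf and len are cacheline-aligned:
 *
 *	for (p = buf; p < buf + len; p += boot_cpu_data.x86_clflush_size)
 *		clwb(p);
 *	wmb();
 *
 * where wmb() (sfence) orders the write-backs before any following store
 * that marks the data valid.
 */
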
#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */