/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>
#include <asm/processor-flags.h>
#include <linux/irqflags.h>
#include <linux/jump_label.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance.  The solution is
 * to use a variable and mimic reads and writes to it to enforce
 * serialization.
 */
extern unsigned long __force_order;
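
/*
 * Illustrative sketch of the pattern (an editorial aside, not a real
 * accessor): naming __force_order as an output of every read and an input
 * of every write threads all of the asm statements below onto a single
 * dependency chain, so the compiler cannot reorder them:
 *
 *	asm volatile("mov %%cr0,%0" : "=r" (val), "=m" (__force_order));
 *	asm volatile("mov %0,%%cr0" : : "r" (val), "m" (__force_order));
 *
 * The read's "=m" output and the write's "m" input name the same variable,
 * which is what enforces the ordering.
 */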

void native_write_cr0(unsigned long val);

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static __always_inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static __always_inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4
	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}
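
/*
 * Illustrative use (a hypothetical helper, not part of this header): CR4
 * is a feature bitmask, so callers typically test individual X86_CR4_*
 * flags from <asm/processor-flags.h>, e.g.:
 *
 *	static inline int example_cr4_pae_on(void)
 *	{
 *		return !!(native_read_cr4() & X86_CR4_PAE);
 *	}
 */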

void native_write_cr4(unsigned long val);

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 rdpkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void wrpkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads the contents of EAX into PKRU;
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}

static inline void __write_pkru(u32 pkru)
{
	/*
	 * WRPKRU is relatively expensive compared to RDPKRU.
	 * Avoid WRPKRU when it would not change the value.
	 */
	if (pkru == rdpkru())
		return;

	wrpkru(pkru);
}
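
/*
 * Illustrative sketch (hypothetical, not part of this header): PKRU holds
 * two bits per protection key: access-disable in bit 2*key and
 * write-disable in bit 2*key + 1.  Making key 1 read-only could look like:
 *
 *	u32 pkru = rdpkru();
 *
 *	pkru &= ~(1u << (2 * 1));	// clear AD for key 1
 *	pkru |= 1u << (2 * 1 + 1);	// set WD for key 1
 *	__write_pkru(pkru);
 */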

#else
static inline u32 rdpkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void asm_load_gs_index(unsigned int selector);

static inline void native_load_gs_index(unsigned int selector)
{
	unsigned long flags;

	local_irq_save(flags);
	asm_load_gs_index(selector);
	local_irq_restore(flags);
}

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

#ifdef CONFIG_PARAVIRT_XXL
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static __always_inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static __always_inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

/*
 * Careful!  CR3 contains more than just an address.  You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}
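
/*
 * Illustrative sketch (an editorial aside): the low 12 bits of CR3 carry
 * the PCID (or the PWT/PCD cache bits when PCIDs are off), so extracting
 * the page-table base means masking, which is what read_cr3_pa() does:
 *
 *	unsigned long pa = __read_cr3() & CR3_ADDR_MASK;
 */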

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline void load_gs_index(unsigned int selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT_XXL */

static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",  /* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}
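
/*
 * Illustrative sketch (hypothetical helper, not part of this header): the
 * three instructions above act on a single cache line, so writing back a
 * whole buffer means walking it in cache-line strides.  The stride is
 * assumed to come from boot_cpu_data.x86_clflush_size:
 *
 *	static void example_clwb_range(void *addr, unsigned long len)
 *	{
 *		unsigned long clsize = boot_cpu_data.x86_clflush_size;
 *		char *p = (char *)((unsigned long)addr & ~(clsize - 1));
 *
 *		for (; p < (char *)addr + len; p += clsize)
 *			clwb(p);
 *	}
 */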

#define nop() asm volatile ("nop")

static inline void serialize(void)
{
	/* Instruction opcode for SERIALIZE; supported in binutils >= 2.35. */
	asm volatile(".byte 0xf, 0x1, 0xe8" ::: "memory");
}
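
/*
 * Illustrative use (an assumption, not from this header): SERIALIZE
 * retires all prior instructions and drains buffered writes before the
 * next instruction fetches, so a caller would typically gate it on the
 * CPUID feature bit:
 *
 *	if (static_cpu_has(X86_FEATURE_SERIALIZE))
 *		serialize();
 */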

#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */