/* SPDX-License-Identifier: GPL-2.0 */

#include <linux/stringify.h>
#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/cpufeatures.h>
#include <asm/alternative-asm.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/bitsperlong.h>

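/*
 * Each thunk replaces a bare 'jmp *%reg' with a speculation-safe
 * sequence. With retpolines enabled, JMP_NOSPEC expands to roughly
 * the following (the real sequence lives in asm/nospec-branch.h and
 * is selected at boot via alternatives):
 *
 *	call	1f
 * 2:	pause
 *	lfence
 *	jmp	2b
 * 1:	mov	%\reg, (%_ASM_SP)
 *	ret
 *
 * Architecturally the 'ret' jumps to the register value planted by
 * the 'mov'; speculatively it follows the RSB entry created by the
 * 'call' and lands in the pause/lfence trap instead of a predicted
 * indirect branch target.
 */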
.macro THUNK reg
	.section .text.__x86.indirect_thunk

ENTRY(__x86_indirect_thunk_\reg)
	CFI_STARTPROC
	JMP_NOSPEC %\reg
	CFI_ENDPROC
ENDPROC(__x86_indirect_thunk_\reg)
.endm

/*
 * Despite being an assembler file we can't just use .irp here
 * because __KSYM_DEPS__ only uses the C preprocessor and would
 * only see one instance of "__x86_indirect_thunk_\reg" rather
 * than one per register with the correct names. So we do it
 * the simple and nasty way...
 */
#define __EXPORT_THUNK(sym) _ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)
#define EXPORT_THUNK(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#define GENERATE_THUNK(reg) THUNK reg ; EXPORT_THUNK(reg)
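
/*
 * For example, on 64-bit GENERATE_THUNK(_ASM_AX) expands to:
 *
 *	THUNK rax
 *	_ASM_NOKPROBE(__x86_indirect_thunk_rax)
 *	EXPORT_SYMBOL(__x86_indirect_thunk_rax)
 *
 * so __KSYM_DEPS__ sees the literal per-register symbol name.
 */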

GENERATE_THUNK(_ASM_AX)
GENERATE_THUNK(_ASM_BX)
GENERATE_THUNK(_ASM_CX)
GENERATE_THUNK(_ASM_DX)
GENERATE_THUNK(_ASM_SI)
GENERATE_THUNK(_ASM_DI)
GENERATE_THUNK(_ASM_BP)
#ifdef CONFIG_64BIT
GENERATE_THUNK(r8)
GENERATE_THUNK(r9)
GENERATE_THUNK(r10)
GENERATE_THUNK(r11)
GENERATE_THUNK(r12)
GENERATE_THUNK(r13)
GENERATE_THUNK(r14)
GENERATE_THUNK(r15)
#endif
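
/*
 * The compiler emits calls to these thunks when building with
 * -mindirect-branch=thunk-extern: an indirect 'call *%rax' becomes
 * a direct 'call __x86_indirect_thunk_rax'.
 */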

/*
 * Fill the CPU return stack buffer.
 *
 * Each entry in the RSB, if used for a speculative 'ret', contains an
 * infinite 'pause; lfence; jmp' loop to capture speculative execution.
 *
 * This is required in various cases for retpoline and IBRS-based
 * mitigations for the Spectre variant 2 vulnerability. Sometimes to
 * eliminate potentially bogus entries from the RSB, and sometimes
 * purely to ensure that it doesn't get empty, which on some CPUs would
 * allow predictions from other (unwanted!) sources to be used.
 *
 * Google experimented with loop-unrolling and this turned out to be
 * the optimal version - two calls, each with their own speculation
 * trap should their return address end up getting used, in a loop.
 */
.macro STUFF_RSB nr:req sp:req
	mov	$(\nr / 2), %_ASM_BX
	.align 16
771:
	call	772f
773:	/* speculation trap */
	pause
	lfence
	jmp	773b
	.align 16
772:
	call	774f
775:	/* speculation trap */
	pause
	lfence
	jmp	775b
	.align 16
774:
	dec	%_ASM_BX
	jnz	771b
	add	$((BITS_PER_LONG/8) * \nr), \sp
.endm
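
/*
 * Each iteration above plants two return addresses in the RSB, so
 * \nr / 2 iterations stuff \nr entries. The calls also leave \nr
 * return addresses on the stack; the final 'add' advances \sp past
 * them so the stack is balanced when the macro falls through.
 */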

#define RSB_FILL_LOOPS		16	/* To avoid underflow */

ENTRY(__fill_rsb)
	STUFF_RSB RSB_FILL_LOOPS, %_ASM_SP
	ret
END(__fill_rsb)
EXPORT_SYMBOL_GPL(__fill_rsb)

#define RSB_CLEAR_LOOPS		32	/* To forcibly overwrite all entries */

ENTRY(__clear_rsb)
	STUFF_RSB RSB_CLEAR_LOOPS, %_ASM_SP
	ret
END(__clear_rsb)
EXPORT_SYMBOL_GPL(__clear_rsb)
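
/*
 * Callers typically reach these through ALTERNATIVE-patched call
 * sites (e.g. vmexit_fill_RSB() in asm/nospec-branch.h), so the
 * 'call' is only patched in on CPUs that need the mitigation.
 */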