blob: e322676039f4ec9e7e96d3d74f7f273392fb92e4 [file] [log] [blame]
Greg Kroah-Hartmanb2441312017-11-01 15:07:57 +01001/* SPDX-License-Identifier: GPL-2.0 */
Avi Kivity6aa8b732006-12-10 02:21:36 -08002#ifndef __SVM_H
3#define __SVM_H
4
David Howellsaf170c52012-12-14 22:37:13 +00005#include <uapi/asm/svm.h>
Babu Moger9780d512020-09-11 14:28:20 -05006#include <uapi/asm/kvm.h>
Xiao Guangrong26bf2642012-09-17 16:31:13 +08007
/*
 * Indices of the 32-bit intercept words in the VMCB Control Area,
 * which starts at byte offset 000h.  Each word is a bitmap of
 * intercept-enable bits; the per-bit positions live in the
 * INTERCEPT_* enum below.
 */
enum intercept_words {
	INTERCEPT_CR = 0,	/* word 0: CR-register read/write intercepts */
	INTERCEPT_DR = 1,	/* word 1: DR-register read/write intercepts */
	INTERCEPT_EXCEPTION = 2,	/* word 2: exception-vector intercepts */
	INTERCEPT_WORD3 = 3,	/* word 3: misc instruction/event intercepts */
	INTERCEPT_WORD4 = 4,	/* word 4: SVM-instruction intercepts */
	INTERCEPT_WORD5 = 5,	/* word 5: INVLPGB/INVPCID/MCOMMIT/TLBSYNC */
	MAX_INTERCEPT = 6,	/* number of intercept words in the VMCB */
};
Xiao Guangrong26bf2642012-09-17 16:31:13 +080022
/*
 * Bit positions of the individual intercepts inside the
 * vmcb_control_area::intercepts[] bitmap.  Bit N of the array as a
 * whole corresponds to intercept word N/32, bit N%32.
 */
enum {
	/* Byte offset 000h (word 0): CR read (bits 0-15) / write (bits 16-31) */
	INTERCEPT_CR0_READ = 0,
	INTERCEPT_CR3_READ = 3,
	INTERCEPT_CR4_READ = 4,
	INTERCEPT_CR8_READ = 8,
	INTERCEPT_CR0_WRITE = 16,	/* write intercepts start at bit 16 */
	INTERCEPT_CR3_WRITE = 16 + 3,
	INTERCEPT_CR4_WRITE = 16 + 4,
	INTERCEPT_CR8_WRITE = 16 + 8,
	/* Byte offset 004h (word 1): DR read (bits 32-47) / write (bits 48-63) */
	INTERCEPT_DR0_READ = 32,
	INTERCEPT_DR1_READ,
	INTERCEPT_DR2_READ,
	INTERCEPT_DR3_READ,
	INTERCEPT_DR4_READ,
	INTERCEPT_DR5_READ,
	INTERCEPT_DR6_READ,
	INTERCEPT_DR7_READ,
	INTERCEPT_DR0_WRITE = 48,
	INTERCEPT_DR1_WRITE,
	INTERCEPT_DR2_WRITE,
	INTERCEPT_DR3_WRITE,
	INTERCEPT_DR4_WRITE,
	INTERCEPT_DR5_WRITE,
	INTERCEPT_DR6_WRITE,
	INTERCEPT_DR7_WRITE,
	/* Byte offset 008h (word 2): base for the exception-vector bitmap */
	INTERCEPT_EXCEPTION_OFFSET = 64,
	/* Byte offset 00Ch (word 3): instruction/event intercepts */
	INTERCEPT_INTR = 96,
	INTERCEPT_NMI,
	INTERCEPT_SMI,
	INTERCEPT_INIT,
	INTERCEPT_VINTR,
	INTERCEPT_SELECTIVE_CR0,
	INTERCEPT_STORE_IDTR,
	INTERCEPT_STORE_GDTR,
	INTERCEPT_STORE_LDTR,
	INTERCEPT_STORE_TR,
	INTERCEPT_LOAD_IDTR,
	INTERCEPT_LOAD_GDTR,
	INTERCEPT_LOAD_LDTR,
	INTERCEPT_LOAD_TR,
	INTERCEPT_RDTSC,
	INTERCEPT_RDPMC,
	INTERCEPT_PUSHF,
	INTERCEPT_POPF,
	INTERCEPT_CPUID,
	INTERCEPT_RSM,
	INTERCEPT_IRET,
	INTERCEPT_INTn,
	INTERCEPT_INVD,
	INTERCEPT_PAUSE,
	INTERCEPT_HLT,
	INTERCEPT_INVLPG,
	INTERCEPT_INVLPGA,
	INTERCEPT_IOIO_PROT,
	INTERCEPT_MSR_PROT,
	INTERCEPT_TASK_SWITCH,
	INTERCEPT_FERR_FREEZE,
	INTERCEPT_SHUTDOWN,
	/* Byte offset 010h (word 4): SVM-instruction intercepts and traps */
	INTERCEPT_VMRUN = 128,
	INTERCEPT_VMMCALL,
	INTERCEPT_VMLOAD,
	INTERCEPT_VMSAVE,
	INTERCEPT_STGI,
	INTERCEPT_CLGI,
	INTERCEPT_SKINIT,
	INTERCEPT_RDTSCP,
	INTERCEPT_ICEBP,
	INTERCEPT_WBINVD,
	INTERCEPT_MONITOR,
	INTERCEPT_MWAIT,
	INTERCEPT_MWAIT_COND,
	INTERCEPT_XSETBV,
	INTERCEPT_RDPRU,
	/* TRAP_* entries continue word 4 implicitly (EFER/CR write traps) */
	TRAP_EFER_WRITE,
	TRAP_CR0_WRITE,
	TRAP_CR1_WRITE,
	TRAP_CR2_WRITE,
	TRAP_CR3_WRITE,
	TRAP_CR4_WRITE,
	TRAP_CR5_WRITE,
	TRAP_CR6_WRITE,
	TRAP_CR7_WRITE,
	TRAP_CR8_WRITE,
	/* Byte offset 014h (word 5) */
	INTERCEPT_INVLPGB = 160,
	INTERCEPT_INVLPGB_ILLEGAL,
	INTERCEPT_INVPCID,
	INTERCEPT_MCOMMIT,
	INTERCEPT_TLBSYNC,
};
118
119
/*
 * VMCB Control Area: hardware-defined layout consumed by the CPU on
 * VMRUN.  Field order, sizes and the reserved padding are ABI — do not
 * reorder.  Total size is checked against EXPECTED_VMCB_CONTROL_AREA_SIZE
 * in __unused_size_checks().
 */
struct __attribute__ ((__packed__)) vmcb_control_area {
	u32 intercepts[MAX_INTERCEPT];	/* intercept bitmaps, words 0-5 */
	u32 reserved_1[15 - MAX_INTERCEPT];	/* pad intercept region to 15 dwords */
	u16 pause_filter_thresh;
	u16 pause_filter_count;
	u64 iopm_base_pa;	/* physical base of the I/O permission map */
	u64 msrpm_base_pa;	/* physical base of the MSR permission map */
	u64 tsc_offset;
	u32 asid;		/* guest address-space identifier */
	u8 tlb_ctl;		/* one of the TLB_CONTROL_* values below */
	u8 reserved_2[3];
	u32 int_ctl;		/* virtual-interrupt control (V_* masks below) */
	u32 int_vector;
	u32 int_state;
	u8 reserved_3[4];
	u32 exit_code;
	u32 exit_code_hi;
	u64 exit_info_1;
	u64 exit_info_2;
	u32 exit_int_info;
	u32 exit_int_info_err;
	u64 nested_ctl;		/* SVM_NESTED_CTL_* bits */
	u64 avic_vapic_bar;
	u64 ghcb_gpa;		/* guest physical address of the GHCB (SEV-ES) */
	u32 event_inj;		/* SVM_EVTINJ_* encoding */
	u32 event_inj_err;
	u64 nested_cr3;
	u64 virt_ext;		/* LBR_CTL_ENABLE / VIRTUAL_VMLOAD_VMSAVE bits */
	u32 clean;		/* VMCB clean bits for state caching */
	u32 reserved_5;
	u64 next_rip;
	u8 insn_len;
	u8 insn_bytes[15];
	u64 avic_backing_page;	/* Offset 0xe0 */
	u8 reserved_6[8];	/* Offset 0xe8 */
	u64 avic_logical_id;	/* Offset 0xf0 */
	u64 avic_physical_id;	/* Offset 0xf8 */
	u8 reserved_7[8];
	u64 vmsa_pa;		/* Used for an SEV-ES guest */
	u8 reserved_8[720];
	/*
	 * Offset 0x3e0, 32 bytes reserved
	 * for use by hypervisor/software.
	 */
	u8 reserved_sw[32];
};
166
167
/* Values for vmcb_control_area.tlb_ctl */
#define TLB_CONTROL_DO_NOTHING 0
#define TLB_CONTROL_FLUSH_ALL_ASID 1
#define TLB_CONTROL_FLUSH_ASID 3
#define TLB_CONTROL_FLUSH_ASID_LOCAL 7

/* Bit fields of vmcb_control_area.int_ctl */
#define V_TPR_MASK 0x0f

#define V_IRQ_SHIFT 8
#define V_IRQ_MASK (1 << V_IRQ_SHIFT)

#define V_GIF_SHIFT 9
#define V_GIF_MASK (1 << V_GIF_SHIFT)

#define V_INTR_PRIO_SHIFT 16
#define V_INTR_PRIO_MASK (0x0f << V_INTR_PRIO_SHIFT)

#define V_IGN_TPR_SHIFT 20
#define V_IGN_TPR_MASK (1 << V_IGN_TPR_SHIFT)

#define V_INTR_MASKING_SHIFT 24
#define V_INTR_MASKING_MASK (1 << V_INTR_MASKING_SHIFT)

#define V_GIF_ENABLE_SHIFT 25
#define V_GIF_ENABLE_MASK (1 << V_GIF_ENABLE_SHIFT)

/* NOTE(review): 1 << 31 is a signed shift into the sign bit (UB in ISO C);
 * kernel builds tolerate it, but 1U << 31 would be strictly cleaner. */
#define AVIC_ENABLE_SHIFT 31
#define AVIC_ENABLE_MASK (1 << AVIC_ENABLE_SHIFT)

/* Bit fields of vmcb_control_area.virt_ext */
#define LBR_CTL_ENABLE_MASK BIT_ULL(0)
#define VIRTUAL_VMLOAD_VMSAVE_ENABLE_MASK BIT_ULL(1)

#define SVM_INTERRUPT_SHADOW_MASK BIT_ULL(0)
#define SVM_GUEST_INTERRUPT_MASK BIT_ULL(1)

/* Decoding of exit_info_1 for IOIO (IN/OUT instruction) intercepts */
#define SVM_IOIO_STR_SHIFT 2
#define SVM_IOIO_REP_SHIFT 3
#define SVM_IOIO_SIZE_SHIFT 4
#define SVM_IOIO_ASIZE_SHIFT 7

#define SVM_IOIO_TYPE_MASK 1
#define SVM_IOIO_STR_MASK (1 << SVM_IOIO_STR_SHIFT)
#define SVM_IOIO_REP_MASK (1 << SVM_IOIO_REP_SHIFT)
#define SVM_IOIO_SIZE_MASK (7 << SVM_IOIO_SIZE_SHIFT)
#define SVM_IOIO_ASIZE_MASK (7 << SVM_IOIO_ASIZE_SHIFT)

/* VM_CR MSR bits */
#define SVM_VM_CR_VALID_MASK 0x001fULL
#define SVM_VM_CR_SVM_LOCK_MASK 0x0008ULL
#define SVM_VM_CR_SVM_DIS_MASK 0x0010ULL

/* Bit fields of vmcb_control_area.nested_ctl */
#define SVM_NESTED_CTL_NP_ENABLE BIT(0)
#define SVM_NESTED_CTL_SEV_ENABLE BIT(1)
#define SVM_NESTED_CTL_SEV_ES_ENABLE BIT(2)
Tom Lendackycea3a192017-12-04 10:57:24 -0600220
/* Segment-register state as stored in the VMCB save area (16 bytes). */
struct vmcb_seg {
	u16 selector;
	u16 attrib;	/* packed segment attributes (SVM_SELECTOR_* masks) */
	u32 limit;
	u64 base;
} __packed;
Avi Kivity6aa8b732006-12-10 02:21:36 -0800227
/*
 * VMCB State Save Area: hardware-defined guest register state.  The
 * layout (including every reserved_* pad) is ABI; total size is checked
 * against EXPECTED_VMCB_SAVE_AREA_SIZE in __unused_size_checks().
 * The trailing portion (from reserved_7 on) is meaningful only for
 * SEV-ES guests via the GHCB or the host save area.
 */
struct vmcb_save_area {
	struct vmcb_seg es;
	struct vmcb_seg cs;
	struct vmcb_seg ss;
	struct vmcb_seg ds;
	struct vmcb_seg fs;
	struct vmcb_seg gs;
	struct vmcb_seg gdtr;
	struct vmcb_seg ldtr;
	struct vmcb_seg idtr;
	struct vmcb_seg tr;
	u8 reserved_1[43];
	u8 cpl;
	u8 reserved_2[4];
	u64 efer;
	u8 reserved_3[104];
	u64 xss;		/* Valid for SEV-ES only */
	u64 cr4;
	u64 cr3;
	u64 cr0;
	u64 dr7;
	u64 dr6;
	u64 rflags;
	u64 rip;
	u8 reserved_4[88];
	u64 rsp;
	u8 reserved_5[24];
	u64 rax;
	u64 star;
	u64 lstar;
	u64 cstar;
	u64 sfmask;
	u64 kernel_gs_base;
	u64 sysenter_cs;
	u64 sysenter_esp;
	u64 sysenter_eip;
	u64 cr2;
	u8 reserved_6[32];
	u64 g_pat;
	u64 dbgctl;
	u64 br_from;
	u64 br_to;
	u64 last_excp_from;
	u64 last_excp_to;

	/*
	 * The following part of the save area is valid only for
	 * SEV-ES guests when referenced through the GHCB or for
	 * saving to the host save area.
	 */
	u8 reserved_7[72];
	u32 spec_ctrl;		/* Guest version of SPEC_CTRL at 0x2E0 */
	u8 reserved_7b[4];
	u32 pkru;
	u8 reserved_7a[20];
	u64 reserved_8;		/* rax already available at 0x01f8 */
	u64 rcx;
	u64 rdx;
	u64 rbx;
	u64 reserved_9;		/* rsp already available at 0x01d8 */
	u64 rbp;
	u64 rsi;
	u64 rdi;
	u64 r8;
	u64 r9;
	u64 r10;
	u64 r11;
	u64 r12;
	u64 r13;
	u64 r14;
	u64 r15;
	u8 reserved_10[16];
	u64 sw_exit_code;	/* software-set exit info for GHCB protocol */
	u64 sw_exit_info_1;
	u64 sw_exit_info_2;
	u64 sw_scratch;
	u8 reserved_11[56];
	u64 xcr0;
	u8 valid_bitmap[16];	/* one bit per qword field; see GHCB accessors */
	u64 x87_state_gpa;
} __packed;
Avi Kivity6aa8b732006-12-10 02:21:36 -0800309
/*
 * Guest-Hypervisor Communication Block (GHCB), used by SEV-ES guests to
 * exchange register state with the hypervisor.  One page; size checked
 * against EXPECTED_GHCB_SIZE in __unused_size_checks().
 */
struct ghcb {
	struct vmcb_save_area save;
	u8 reserved_save[2048 - sizeof(struct vmcb_save_area)];	/* pad save area to 2 KiB */

	u8 shared_buffer[2032];	/* scratch area shared with the hypervisor */

	u8 reserved_1[10];
	u16 protocol_version;	/* negotiated SEV-ES/GHCB protocol version */
	u32 ghcb_usage;
} __packed;
320
321
/* Expected sizes of the ABI structures above; enforced at build time. */
#define EXPECTED_VMCB_SAVE_AREA_SIZE	1032
#define EXPECTED_VMCB_CONTROL_AREA_SIZE	1024
#define EXPECTED_GHCB_SIZE		PAGE_SIZE
Paolo Bonzini7923ef42020-05-18 15:24:46 -0400325
/*
 * Build-time layout verification: fails compilation if any of the
 * hardware-defined structures drifts from its expected size.  Never
 * called at runtime.
 */
static inline void __unused_size_checks(void)
{
	BUILD_BUG_ON(sizeof(struct vmcb_save_area) != EXPECTED_VMCB_SAVE_AREA_SIZE);
	BUILD_BUG_ON(sizeof(struct vmcb_control_area) != EXPECTED_VMCB_CONTROL_AREA_SIZE);
	BUILD_BUG_ON(sizeof(struct ghcb) != EXPECTED_GHCB_SIZE);
}
332
/* A complete VMCB: control area followed by the state save area. */
struct vmcb {
	struct vmcb_control_area control;
	struct vmcb_save_area save;
} __packed;
Avi Kivity6aa8b732006-12-10 02:21:36 -0800337
/* CPUID leaf advertising SVM feature information */
#define SVM_CPUID_FUNC 0x8000000a

/* Bit number of the SVM-disable flag in the VM_CR MSR */
#define SVM_VM_CR_SVM_DISABLE 4

/* Shifts/masks for the packed attrib field of struct vmcb_seg */
#define SVM_SELECTOR_S_SHIFT 4
#define SVM_SELECTOR_DPL_SHIFT 5
#define SVM_SELECTOR_P_SHIFT 7
#define SVM_SELECTOR_AVL_SHIFT 8
#define SVM_SELECTOR_L_SHIFT 9
#define SVM_SELECTOR_DB_SHIFT 10
#define SVM_SELECTOR_G_SHIFT 11

#define SVM_SELECTOR_TYPE_MASK (0xf)
#define SVM_SELECTOR_S_MASK (1 << SVM_SELECTOR_S_SHIFT)
#define SVM_SELECTOR_DPL_MASK (3 << SVM_SELECTOR_DPL_SHIFT)
#define SVM_SELECTOR_P_MASK (1 << SVM_SELECTOR_P_SHIFT)
#define SVM_SELECTOR_AVL_MASK (1 << SVM_SELECTOR_AVL_SHIFT)
#define SVM_SELECTOR_L_MASK (1 << SVM_SELECTOR_L_SHIFT)
#define SVM_SELECTOR_DB_MASK (1 << SVM_SELECTOR_DB_SHIFT)
#define SVM_SELECTOR_G_MASK (1 << SVM_SELECTOR_G_SHIFT)

/* Type-field bits within SVM_SELECTOR_TYPE_MASK */
#define SVM_SELECTOR_WRITE_MASK (1 << 1)
#define SVM_SELECTOR_READ_MASK SVM_SELECTOR_WRITE_MASK
#define SVM_SELECTOR_CODE_MASK (1 << 3)

/* Encoding of vmcb_control_area.event_inj */
#define SVM_EVTINJ_VEC_MASK 0xff

#define SVM_EVTINJ_TYPE_SHIFT 8
#define SVM_EVTINJ_TYPE_MASK (7 << SVM_EVTINJ_TYPE_SHIFT)

#define SVM_EVTINJ_TYPE_INTR (0 << SVM_EVTINJ_TYPE_SHIFT)
#define SVM_EVTINJ_TYPE_NMI (2 << SVM_EVTINJ_TYPE_SHIFT)
#define SVM_EVTINJ_TYPE_EXEPT (3 << SVM_EVTINJ_TYPE_SHIFT)
#define SVM_EVTINJ_TYPE_SOFT (4 << SVM_EVTINJ_TYPE_SHIFT)

/* NOTE(review): 1 << 31 is a signed shift into the sign bit (UB in ISO C);
 * kept as upstream has it, but 1U << 31 would be strictly conforming. */
#define SVM_EVTINJ_VALID (1 << 31)
#define SVM_EVTINJ_VALID_ERR (1 << 11)

/* exit_int_info shares the event_inj encoding */
#define SVM_EXITINTINFO_VEC_MASK SVM_EVTINJ_VEC_MASK
#define SVM_EXITINTINFO_TYPE_MASK SVM_EVTINJ_TYPE_MASK

#define SVM_EXITINTINFO_TYPE_INTR SVM_EVTINJ_TYPE_INTR
#define SVM_EXITINTINFO_TYPE_NMI SVM_EVTINJ_TYPE_NMI
#define SVM_EXITINTINFO_TYPE_EXEPT SVM_EVTINJ_TYPE_EXEPT
#define SVM_EXITINTINFO_TYPE_SOFT SVM_EVTINJ_TYPE_SOFT

#define SVM_EXITINTINFO_VALID SVM_EVTINJ_VALID
#define SVM_EXITINTINFO_VALID_ERR SVM_EVTINJ_VALID_ERR

/* exit_info bit positions for task-switch intercepts */
#define SVM_EXITINFOSHIFT_TS_REASON_IRET 36
#define SVM_EXITINFOSHIFT_TS_REASON_JMP 38
#define SVM_EXITINFOSHIFT_TS_HAS_ERROR_CODE 44

#define SVM_EXITINFO_REG_MASK 0x0F

/* CR0 bits subject to the selective-CR0-write intercept */
#define SVM_CR0_SELECTIVE_MASK (X86_CR0_TS | X86_CR0_MP)
Avi Kivity6aa8b732006-12-10 02:21:36 -0800394
/*
 * GHCB Accessor functions.
 *
 * For each qword-sized save-area field, DEFINE_GHCB_ACCESSORS() emits:
 *   ghcb_<field>_is_valid()     - test the field's bit in valid_bitmap
 *   ghcb_get_<field>()          - unconditional read
 *   ghcb_get_<field>_if_valid() - read, or 0 when the valid bit is clear
 *   ghcb_set_<field>()          - write the field and mark it valid
 *
 * The valid_bitmap index is the field's qword offset within
 * struct vmcb_save_area.
 */

#define GHCB_BITMAP_IDX(field)							\
	(offsetof(struct vmcb_save_area, field) / sizeof(u64))

#define DEFINE_GHCB_ACCESSORS(field)						\
	static inline bool ghcb_##field##_is_valid(const struct ghcb *ghcb)	\
	{									\
		return test_bit(GHCB_BITMAP_IDX(field),				\
				(unsigned long *)&ghcb->save.valid_bitmap);	\
	}									\
										\
	static inline u64 ghcb_get_##field(struct ghcb *ghcb)			\
	{									\
		return ghcb->save.field;					\
	}									\
										\
	static inline u64 ghcb_get_##field##_if_valid(struct ghcb *ghcb)	\
	{									\
		return ghcb_##field##_is_valid(ghcb) ? ghcb->save.field : 0;	\
	}									\
										\
	static inline void ghcb_set_##field(struct ghcb *ghcb, u64 value)	\
	{									\
		__set_bit(GHCB_BITMAP_IDX(field),				\
			  (unsigned long *)&ghcb->save.valid_bitmap);		\
		ghcb->save.field = value;					\
	}

DEFINE_GHCB_ACCESSORS(cpl)
DEFINE_GHCB_ACCESSORS(rip)
DEFINE_GHCB_ACCESSORS(rsp)
DEFINE_GHCB_ACCESSORS(rax)
DEFINE_GHCB_ACCESSORS(rcx)
DEFINE_GHCB_ACCESSORS(rdx)
DEFINE_GHCB_ACCESSORS(rbx)
DEFINE_GHCB_ACCESSORS(rbp)
DEFINE_GHCB_ACCESSORS(rsi)
DEFINE_GHCB_ACCESSORS(rdi)
DEFINE_GHCB_ACCESSORS(r8)
DEFINE_GHCB_ACCESSORS(r9)
DEFINE_GHCB_ACCESSORS(r10)
DEFINE_GHCB_ACCESSORS(r11)
DEFINE_GHCB_ACCESSORS(r12)
DEFINE_GHCB_ACCESSORS(r13)
DEFINE_GHCB_ACCESSORS(r14)
DEFINE_GHCB_ACCESSORS(r15)
DEFINE_GHCB_ACCESSORS(sw_exit_code)
DEFINE_GHCB_ACCESSORS(sw_exit_info_1)
DEFINE_GHCB_ACCESSORS(sw_exit_info_2)
DEFINE_GHCB_ACCESSORS(sw_scratch)
DEFINE_GHCB_ACCESSORS(xcr0)
447
Avi Kivity6aa8b732006-12-10 02:21:36 -0800448#endif