/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License, version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * Derived from book3s_hv_rmhandlers.S, which is:
 *
 * Copyright 2011 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 *
 */

#include <asm/reg.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/tm.h>
#include <asm/cputable.h>

#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
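/*
 * Offset of checkpointed GPR 'reg' within the vcpu struct: VCPU_GPR_TM is
 * the base of the checkpointed GPR array and ULONG_SIZE the size of one GPR.
 */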
#define VCPU_GPRS_TM(reg) (((reg) * ULONG_SIZE) + VCPU_GPR_TM)

/*
 * Save transactional state and TM-related registers.
 * Called with:
 * - r3 pointing to the vcpu struct
 * - r4 containing the MSR with current TS bits:
 *	(For HV KVM, it is VCPU_MSR; for PR KVM, it is the host MSR).
 * - r5 containing a flag indicating that non-volatile registers
 *	must be preserved.
 * If r5 == 0, this can modify all checkpointed registers, but
 * restores r1, r2 before exit.  If r5 != 0, this restores the
 * MSR TM/FP/VEC/VSX bits to their state on entry.
 */
_GLOBAL(__kvmppc_save_tm)
        mflr r0
        std r0, PPC_LR_STKOFF(r1)
        stdu r1, -SWITCH_FRAME_SIZE(r1)

        mr r9, r3
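        /* cr7 remembers whether non-volatile state must be preserved (r5 != 0). */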
        cmpdi cr7, r5, 0

        /* Turn on TM. */
        mfmsr r8
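        /* Keep the MSR value on entry in r10; it is used to build the final MSR when r5 != 0. */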
        mr r10, r8
        li r0, 1
        rldimi r8, r0, MSR_TM_LG, 63-MSR_TM_LG
        ori r8, r8, MSR_FP
        oris r8, r8, (MSR_VEC | MSR_VSX)@h
        mtmsrd r8

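        /* Extract MSR[TS] from r4; zero means no transaction was active in the guest. */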
        rldicl. r4, r4, 64 - MSR_TS_S_LG, 62
        beq 1f /* TM not active in guest. */

        std r1, HSTATE_SCRATCH2(r13)
        std r3, HSTATE_SCRATCH1(r13)

        /* Save CR on the stack - even if r5 == 0 we need to get cr7 back. */
        mfcr r6
        SAVE_GPR(6, r1)

        /* Save DSCR so we can restore it to avoid running with user value */
        mfspr r7, SPRN_DSCR
        SAVE_GPR(7, r1)

        /*
         * We are going to do treclaim., which will modify all checkpointed
         * registers.  Save the non-volatile registers on the stack if
         * preservation of non-volatile state has been requested.
         */
        beq cr7, 3f
        SAVE_NVGPRS(r1)

        /* MSR[TS] will be 0 (non-transactional) once we do treclaim. */
        li r0, 0
        rldimi r10, r0, MSR_TS_S_LG, 63 - MSR_TS_T_LG
        SAVE_GPR(10, r1) /* final MSR value */
3:
#ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
BEGIN_FTR_SECTION
        /* Emulation of the treclaim instruction needs TEXASR before treclaim */
        mfspr r6, SPRN_TEXASR
        std r6, VCPU_ORIG_TEXASR(r3)
END_FTR_SECTION_IFSET(CPU_FTR_P9_TM_HV_ASSIST)
#endif

        /* Clear the MSR RI since r1, r13 are all going to be foobar. */
        li r5, 0
        mtmsrd r5, 1

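        /* Failure cause for the reclaim; treclaim. records it in TEXASR. */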
        li r3, TM_CAUSE_KVM_RESCHED

        /* All GPRs are volatile at this point. */
        TRECLAIM(R3)

        /* Temporarily store r13 and r9 so we have some regs to play with */
        SET_SCRATCH0(r13)
        GET_PACA(r13)
        std r9, PACATMSCRATCH(r13)
        ld r9, HSTATE_SCRATCH1(r13)

        /* Save away PPR soon so we don't run with user value. */
        std r0, VCPU_GPRS_TM(0)(r9)
        mfspr r0, SPRN_PPR
        HMT_MEDIUM

        /* Reload stack pointer. */
        std r1, VCPU_GPRS_TM(1)(r9)
        ld r1, HSTATE_SCRATCH2(r13)

        /* Set MSR RI now we have r1 and r13 back. */
        std r2, VCPU_GPRS_TM(2)(r9)
        li r2, MSR_RI
        mtmsrd r2, 1

        /* Reload TOC pointer. */
        ld r2, PACATOC(r13)

        /* Save all but r0-r2, r9 & r13 */
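        /* 'reg' is an assembler symbol: the .rept emits one std per GPR r3-r31,
         * skipping r9 and r13, which currently hold the vcpu pointer and the
         * PACA; their checkpointed values are saved from the scratch slots below.
         */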
        reg = 3
        .rept 29
        .if (reg != 9) && (reg != 13)
        std reg, VCPU_GPRS_TM(reg)(r9)
        .endif
        reg = reg + 1
        .endr
        /* ... now save r13 */
        GET_SCRATCH0(r4)
        std r4, VCPU_GPRS_TM(13)(r9)
        /* ... and save r9 */
        ld r4, PACATMSCRATCH(r13)
        std r4, VCPU_GPRS_TM(9)(r9)

        /* Restore host DSCR and CR values, after saving guest values */
        mfcr r6
        mfspr r7, SPRN_DSCR
        stw r6, VCPU_CR_TM(r9)
        std r7, VCPU_DSCR_TM(r9)
        REST_GPR(6, r1)
        REST_GPR(7, r1)
        mtcr r6
        mtspr SPRN_DSCR, r7

        /* Save away checkpointed SPRs. */
        std r0, VCPU_PPR_TM(r9)
        mflr r5
        mfctr r7
        mfspr r8, SPRN_AMR
        mfspr r10, SPRN_TAR
        mfxer r11
        std r5, VCPU_LR_TM(r9)
        std r7, VCPU_CTR_TM(r9)
        std r8, VCPU_AMR_TM(r9)
        std r10, VCPU_TAR_TM(r9)
        std r11, VCPU_XER_TM(r9)

        /* Save FP/VSX. */
        addi r3, r9, VCPU_FPRS_TM
        bl store_fp_state
        addi r3, r9, VCPU_VRS_TM
        bl store_vr_state
        mfspr r6, SPRN_VRSAVE
        stw r6, VCPU_VRSAVE_TM(r9)

        /* Restore non-volatile registers if requested to */
        beq cr7, 1f
        REST_NVGPRS(r1)
        REST_GPR(10, r1)
1:
        /*
         * We need to save these SPRs after the treclaim so that the software
         * error code is recorded correctly in the TEXASR.  Also the user may
         * change these outside of a transaction, so they must always be
         * context switched.
         */
        mfspr r7, SPRN_TEXASR
        std r7, VCPU_TEXASR(r9)
        mfspr r5, SPRN_TFHAR
        mfspr r6, SPRN_TFIAR
        std r5, VCPU_TFHAR(r9)
        std r6, VCPU_TFIAR(r9)

        /* Restore MSR state if requested */
        beq cr7, 2f
        mtmsrd r10, 0
2:
        addi r1, r1, SWITCH_FRAME_SIZE
        ld r0, PPC_LR_STKOFF(r1)
        mtlr r0
        blr

/*
 * _kvmppc_save_tm_pr() is a wrapper around __kvmppc_save_tm(), so that it can
 * be invoked from C code by PR KVM only.
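 * The C-callable declaration (see asm/asm-prototypes.h) is expected to be:
 *	void _kvmppc_save_tm_pr(struct kvm_vcpu *vcpu, u64 guest_msr);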
 */
_GLOBAL(_kvmppc_save_tm_pr)
        mflr r0
        std r0, PPC_LR_STKOFF(r1)
        stdu r1, -PPC_MIN_STKFRM(r1)

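        /* save TAR so that it can be recovered later */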
        mfspr r8, SPRN_TAR
        std r8, PPC_MIN_STKFRM-8(r1)

        li r5, 1 /* preserve non-volatile registers */
        bl __kvmppc_save_tm

        ld r8, PPC_MIN_STKFRM-8(r1)
        mtspr SPRN_TAR, r8

        addi r1, r1, PPC_MIN_STKFRM
        ld r0, PPC_LR_STKOFF(r1)
        mtlr r0
        blr

EXPORT_SYMBOL_GPL(_kvmppc_save_tm_pr);

/*
 * Restore transactional state and TM-related registers.
 * Called with:
 * - r3 pointing to the vcpu struct.
 * - r4 containing the guest MSR with desired TS bits:
 *	For HV KVM, it is VCPU_MSR;
 *	for PR KVM, it is provided by the caller.
 * - r5 containing a flag indicating that non-volatile registers
 *	must be preserved.
 * If r5 == 0, this potentially modifies all checkpointed registers, but
 * restores r1, r2 from the PACA before exit.
 * If r5 != 0, this restores the MSR TM/FP/VEC/VSX bits to their state on entry.
 */
_GLOBAL(__kvmppc_restore_tm)
        mflr r0
        std r0, PPC_LR_STKOFF(r1)

        cmpdi cr7, r5, 0

        /* Turn on TM/FP/VSX/VMX so we can restore them. */
        mfmsr r5
        mr r10, r5
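        /* MSR_TM is in the upper 32 bits of the MSR, so it cannot be loaded
         * with a 16-bit li immediate; build it with a shift.  r10 keeps the
         * entry MSR for the exit path.
         */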
        li r6, MSR_TM >> 32
        sldi r6, r6, 32
        or r5, r5, r6
        ori r5, r5, MSR_FP
        oris r5, r5, (MSR_VEC | MSR_VSX)@h
        mtmsrd r5

        /*
         * The user may change these outside of a transaction, so they must
         * always be context switched.
         */
        ld r5, VCPU_TFHAR(r3)
        ld r6, VCPU_TFIAR(r3)
        ld r7, VCPU_TEXASR(r3)
        mtspr SPRN_TFHAR, r5
        mtspr SPRN_TFIAR, r6
        mtspr SPRN_TEXASR, r7

        mr r5, r4
        rldicl. r5, r5, 64 - MSR_TS_S_LG, 62
        beq 9f /* TM not active in guest */

        /* Make sure the failure summary is set, otherwise we'll program check
         * when we trechkpt.  It's possible that this might not have been set
         * on a kvmppc_set_one_reg() call but we shouldn't let this crash the
         * host.
         */
        oris r7, r7, (TEXASR_FS)@h
        mtspr SPRN_TEXASR, r7

        /*
         * Make a stack frame and save non-volatile registers if requested.
         */
        stdu r1, -SWITCH_FRAME_SIZE(r1)
        std r1, HSTATE_SCRATCH2(r13)

        mfcr r6
        mfspr r7, SPRN_DSCR
        SAVE_GPR(2, r1)
        SAVE_GPR(6, r1)
        SAVE_GPR(7, r1)

        beq cr7, 4f
        SAVE_NVGPRS(r1)

        /* MSR[TS] will be 1 (suspended) once we do trechkpt */
        li r0, 1
        rldimi r10, r0, MSR_TS_S_LG, 63 - MSR_TS_T_LG
        SAVE_GPR(10, r1) /* final MSR value */
4:
        /*
         * We need to load up the checkpointed state for the guest.
         * We need to do this early as it will blow away any GPRs, VSRs and
         * some SPRs.
         */

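        /* Keep the vcpu pointer in r31: as a non-volatile register it survives
         * the calls to load_fp_state and load_vr_state below.
         */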
        mr r31, r3
        addi r3, r31, VCPU_FPRS_TM
        bl load_fp_state
        addi r3, r31, VCPU_VRS_TM
        bl load_vr_state
        mr r3, r31
        lwz r7, VCPU_VRSAVE_TM(r3)
        mtspr SPRN_VRSAVE, r7

        ld r5, VCPU_LR_TM(r3)
        lwz r6, VCPU_CR_TM(r3)
        ld r7, VCPU_CTR_TM(r3)
        ld r8, VCPU_AMR_TM(r3)
        ld r9, VCPU_TAR_TM(r3)
        ld r10, VCPU_XER_TM(r3)
        mtlr r5
        mtcr r6
        mtctr r7
        mtspr SPRN_AMR, r8
        mtspr SPRN_TAR, r9
        mtxer r10

        /*
         * Load up PPR and DSCR values but don't put them in the actual SPRs
         * till the last moment to avoid running with userspace PPR and DSCR for
         * too long.
         */
        ld r29, VCPU_DSCR_TM(r3)
        ld r30, VCPU_PPR_TM(r3)

        /* Clear the MSR RI since r1, r13 are all going to be foobar. */
        li r5, 0
        mtmsrd r5, 1

        /* Load GPRs r0-r28 */
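        /* r29-r31 still hold DSCR, PPR and the vcpu pointer, so they are loaded last. */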
        reg = 0
        .rept 29
        ld reg, VCPU_GPRS_TM(reg)(r31)
        reg = reg + 1
        .endr

        mtspr SPRN_DSCR, r29
        mtspr SPRN_PPR, r30

        /* Load final GPRs */
        ld 29, VCPU_GPRS_TM(29)(r31)
        ld 30, VCPU_GPRS_TM(30)(r31)
        ld 31, VCPU_GPRS_TM(31)(r31)

        /* TM checkpointed state is now set up.  All GPRs are now volatile. */
        TRECHKPT

        /* Now let's get back the state we need. */
        HMT_MEDIUM
        GET_PACA(r13)
        ld r1, HSTATE_SCRATCH2(r13)
        REST_GPR(7, r1)
        mtspr SPRN_DSCR, r7

        /* Set the MSR RI since we have our registers back. */
        li r5, MSR_RI
        mtmsrd r5, 1

        /* Restore TOC pointer and CR */
        REST_GPR(2, r1)
        REST_GPR(6, r1)
        mtcr r6

        /* Restore non-volatile registers if requested to. */
        beq cr7, 5f
        REST_GPR(10, r1)
        REST_NVGPRS(r1)

5:      addi r1, r1, SWITCH_FRAME_SIZE
        ld r0, PPC_LR_STKOFF(r1)
        mtlr r0

9:      /* Restore MSR bits if requested */
        beqlr cr7
        mtmsrd r10, 0
        blr

/*
 * _kvmppc_restore_tm_pr() is a wrapper around __kvmppc_restore_tm(), so that
 * it can be invoked from C code by PR KVM only.
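 * The C-callable declaration (see asm/asm-prototypes.h) is expected to be:
 *	void _kvmppc_restore_tm_pr(struct kvm_vcpu *vcpu, u64 guest_msr);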
 */
_GLOBAL(_kvmppc_restore_tm_pr)
        mflr r0
        std r0, PPC_LR_STKOFF(r1)
        stdu r1, -PPC_MIN_STKFRM(r1)

        /* save TAR so that it can be recovered later */
        mfspr r8, SPRN_TAR
        std r8, PPC_MIN_STKFRM-8(r1)

        li r5, 1
        bl __kvmppc_restore_tm

        ld r8, PPC_MIN_STKFRM-8(r1)
        mtspr SPRN_TAR, r8

        addi r1, r1, PPC_MIN_STKFRM
        ld r0, PPC_LR_STKOFF(r1)
        mtlr r0
        blr

EXPORT_SYMBOL_GPL(_kvmppc_restore_tm_pr);
#endif /* CONFIG_PPC_TRANSACTIONAL_MEM */