/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 */

#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>
#include <asm/pgtable-hwdef.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

	.text
	.pushsection	.hyp.idmap.text, "ax"

	.align	11
/*
 * __kvm_hyp_init: initial EL2 exception vector, live while KVM takes
 * over EL2 from the hyp stub. Only the 64-bit EL1 synchronous entry
 * (i.e. the host's HVC) is meaningful; every other vector slot parks
 * the CPU. The table is 2KB-aligned as required for VBAR_EL2
 * (.align 11 precedes this block).
 */
SYM_CODE_START(__kvm_hyp_init)
	ventry	__invalid		// Synchronous EL2t
	ventry	__invalid		// IRQ EL2t
	ventry	__invalid		// FIQ EL2t
	ventry	__invalid		// Error EL2t

	ventry	__invalid		// Synchronous EL2h
	ventry	__invalid		// IRQ EL2h
	ventry	__invalid		// FIQ EL2h
	ventry	__invalid		// Error EL2h

	ventry	__do_hyp_init		// Synchronous 64-bit EL1
	ventry	__invalid		// IRQ 64-bit EL1
	ventry	__invalid		// FIQ 64-bit EL1
	ventry	__invalid		// Error 64-bit EL1

	ventry	__invalid		// Synchronous 32-bit EL1
	ventry	__invalid		// IRQ 32-bit EL1
	ventry	__invalid		// FIQ 32-bit EL1
	ventry	__invalid		// Error 32-bit EL1

__invalid:
	b	.			// unexpected exception: spin forever

	/*
	 * Register contract with the host caller (HVC from EL1):
	 *
	 * x0: SMCCC function ID
	 * x1: HYP pgd
	 * x2: per-CPU offset
	 * x3: HYP stack
	 * x4: HYP vectors
	 */
__do_hyp_init:
	/* Check for a stub HVC call */
	cmp	x0, #HVC_STUB_HCALL_NR
	b.lo	__kvm_handle_stub_hvc

	// We only actively check bits [24:31], and everything
	// else has to be zero, which we check at build time.
#if (KVM_HOST_SMCCC_FUNC(__kvm_hyp_init) & 0xFFFFFFFF00FFFFFF)
#error Unexpected __KVM_HOST_SMCCC_FUNC___kvm_hyp_init value
#endif

	/*
	 * Match x0 against KVM_HOST_SMCCC_FUNC(__kvm_hyp_init) using only
	 * rotates and 4-bit immediate eors (eor on a wide immediate cannot
	 * encode arbitrary values): after the two ror/eor pairs, x0 is zero
	 * iff the function ID matched.
	 */
	ror	x0, x0, #24
	eor	x0, x0, #((KVM_HOST_SMCCC_FUNC(__kvm_hyp_init) >> 24) & 0xF)
	ror	x0, x0, #4
	eor	x0, x0, #((KVM_HOST_SMCCC_FUNC(__kvm_hyp_init) >> 28) & 0xF)
	cbz	x0, 1f
	mov	x0, #SMCCC_RET_NOT_SUPPORTED
	eret

1:
	/* Set tpidr_el2 for use by HYP to free a register */
	msr	tpidr_el2, x2

	/* Install the HYP page tables (x1 = pgd PA), with CnP if present */
	phys_to_ttbr x0, x1
alternative_if ARM64_HAS_CNP
	orr	x0, x0, #TTBR_CNP_BIT
alternative_else_nop_endif
	msr	ttbr0_el2, x0

	/* Derive TCR_EL2 from the host's TCR_EL1, forcing the RES1 bits */
	mrs	x0, tcr_el1
	mov_q	x1, TCR_EL2_MASK
	and	x0, x0, x1
	mov	x1, #TCR_EL2_RES1
	orr	x0, x0, x1

	/*
	 * The ID map may be configured to use an extended virtual address
	 * range. This is only the case if system RAM is out of range for the
	 * currently configured page size and VA_BITS, in which case we will
	 * also need the extended virtual range for the HYP ID map, or we won't
	 * be able to enable the EL2 MMU.
	 *
	 * However, at EL2, there is only one TTBR register, and we can't switch
	 * between translation tables *and* update TCR_EL2.T0SZ at the same
	 * time. Bottom line: we need to use the extended range with *both* our
	 * translation tables.
	 *
	 * So use the same T0SZ value we use for the ID map.
	 */
	ldr_l	x1, idmap_t0sz
	bfi	x0, x1, TCR_T0SZ_OFFSET, TCR_TxSZ_WIDTH

	/*
	 * Set the PS bits in TCR_EL2.
	 */
	tcr_compute_pa_size x0, #TCR_EL2_PS_SHIFT, x1, x2

	msr	tcr_el2, x0

	/* Reuse the host's memory attribute encodings at EL2 */
	mrs	x0, mair_el1
	msr	mair_el2, x0
	isb				// make sysreg writes visible before TLBI

	/* Invalidate the stale TLBs from Bootloader */
	tlbi	alle2
	dsb	sy

	/*
	 * Preserve all the RES1 bits while setting the default flags,
	 * as well as the EE bit on BE. Drop the A flag since the compiler
	 * is allowed to generate unaligned accesses.
	 */
	mov_q	x0, (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
CPU_BE(	orr	x0, x0, #SCTLR_ELx_EE)
alternative_if ARM64_HAS_ADDRESS_AUTH
	mov_q	x1, (SCTLR_ELx_ENIA | SCTLR_ELx_ENIB | \
		     SCTLR_ELx_ENDA | SCTLR_ELx_ENDB)
	orr	x0, x0, x1
alternative_else_nop_endif
	msr	sctlr_el2, x0		// this enables the EL2 MMU
	isb

	/* Set the stack and new vectors */
	mov	sp, x3
	msr	vbar_el2, x4

	/* Hello, World! */
	mov	x0, #SMCCC_RET_SUCCESS
	eret
SYM_CODE_END(__kvm_hyp_init)
Marc Zyngier | 092bd14 | 2012-12-17 17:07:52 +0000 | [diff] [blame] | 145 | |
/*
 * __kvm_handle_stub_hvc: service the hyp-stub hypercalls reachable before
 * full init (x0 < HVC_STUB_HCALL_NR, dispatched from __do_hyp_init).
 *
 * HVC_SOFT_RESTART: x1 = EL2 entry point, x2-x4 = its arguments; tears
 *                   KVM down to the stub and jumps to x1, never returns.
 * HVC_RESET_VECTORS: tears down to the stub and erets with x0 = 0.
 * Anything else: erets with x0 = HVC_STUB_ERR.
 */
SYM_CODE_START(__kvm_handle_stub_hvc)
	cmp	x0, #HVC_SOFT_RESTART
	b.ne	1f

	/* This is where we're about to jump, staying at EL2 */
	msr	elr_el2, x1
	/* Mask all exceptions, target EL2h on the eret */
	mov	x0, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT | PSR_MODE_EL2h)
	msr	spsr_el2, x0

	/* Shuffle the arguments, and don't come back */
	mov	x0, x2
	mov	x1, x3
	mov	x2, x4
	b	reset

1:	cmp	x0, #HVC_RESET_VECTORS
	b.ne	1f

	/*
	 * Set the HVC_RESET_VECTORS return code before entering the common
	 * path so that we do not clobber x0-x2 in case we are coming via
	 * HVC_SOFT_RESTART.
	 */
	mov	x0, xzr
reset:
	/* Reset kvm back to the hyp stub. */
	mrs	x5, sctlr_el2
	mov_q	x6, SCTLR_ELx_FLAGS
	bic	x5, x5, x6		// Clear SCTLR_ELx_M and friends (MMU off)
	pre_disable_mmu_workaround
	msr	sctlr_el2, x5
	isb

	/* Install stub vectors */
	adr_l	x5, __hyp_stub_vectors
	msr	vbar_el2, x5
	eret				// soft-restart path never returns here:
					// ELR_EL2 already points at the new entry

1:	/* Bad stub call */
	mov_q	x0, HVC_STUB_ERR
	eret

SYM_CODE_END(__kvm_handle_stub_hvc)

	.popsection