Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0 |
Jarkko Sakkinen | 084ee1c6 | 2012-05-08 21:22:26 +0300 | [diff] [blame] | 2 | #include <linux/io.h> |
Andy Lutomirski | 5ff3e2c | 2016-08-10 02:29:16 -0700 | [diff] [blame] | 3 | #include <linux/slab.h> |
Jarkko Sakkinen | 084ee1c6 | 2012-05-08 21:22:26 +0300 | [diff] [blame] | 4 | #include <linux/memblock.h> |
Tom Lendacky | 32cb4d0 | 2021-09-08 17:58:36 -0500 | [diff] [blame] | 5 | #include <linux/cc_platform.h> |
Mike Rapoport | 65fddcf | 2020-06-08 21:32:42 -0700 | [diff] [blame] | 6 | #include <linux/pgtable.h> |
Jarkko Sakkinen | 084ee1c6 | 2012-05-08 21:22:26 +0300 | [diff] [blame] | 7 | |
Laura Abbott | d116365 | 2017-05-08 15:58:11 -0700 | [diff] [blame] | 8 | #include <asm/set_memory.h> |
Jarkko Sakkinen | 084ee1c6 | 2012-05-08 21:22:26 +0300 | [diff] [blame] | 9 | #include <asm/realmode.h> |
Andy Lutomirski | 18bc7bd | 2016-08-10 02:29:14 -0700 | [diff] [blame] | 10 | #include <asm/tlbflush.h> |
Lianbo Jiang | 6f599d8 | 2019-11-08 17:00:25 +0800 | [diff] [blame] | 11 | #include <asm/crash.h> |
Brijesh Singh | e759959 | 2021-04-27 06:16:34 -0500 | [diff] [blame] | 12 | #include <asm/sev.h> |
Jarkko Sakkinen | 084ee1c6 | 2012-05-08 21:22:26 +0300 | [diff] [blame] | 13 | |
/* Header of the copied real-mode blob; NULL until reserve_real_mode()
 * has allocated sub-1M memory for it. */
struct real_mode_header *real_mode_header;
/* Points at trampoline_header->cr4 (wired up in setup_real_mode());
 * holds the CR4 value APs load during boot. */
u32 *trampoline_cr4_features;

/* Hold the pgd entry used on booting additional CPUs */
pgd_t trampoline_pgd_entry;
| 19 | |
/*
 * Switch CR3 to the page table used by the real-mode trampoline.
 * Called on the way down to real mode; the trampoline page table
 * provides the identity mapping of low memory needed there.
 */
void load_trampoline_pgtable(void)
{
#ifdef CONFIG_X86_32
	/* On 32-bit, initial_page_table serves as the trampoline page table. */
	load_cr3(initial_page_table);
#else
	/*
	 * This function is called before exiting to real-mode and that will
	 * fail with CR4.PCIDE still set.
	 */
	if (boot_cpu_has(X86_FEATURE_PCID))
		cr4_clear_bits(X86_CR4_PCIDE);

	write_cr3(real_mode_header->trampoline_pgd);
#endif

	/*
	 * The CR3 write above will not flush global TLB entries.
	 * Stale, global entries from previous page tables may still be
	 * present. Flush those stale entries.
	 *
	 * This ensures that memory accessed while running with
	 * trampoline_pgd is *actually* mapped into trampoline_pgd.
	 */
	__flush_tlb_all();
}
| 45 | |
/*
 * Reserve low memory for the real-mode trampoline blob.  Runs while
 * memblock is still the allocator (before slab comes up), so the
 * allocation can be placed below 1M where APs can execute real-mode
 * code.  The blob itself is copied in later by setup_real_mode().
 */
void __init reserve_real_mode(void)
{
	phys_addr_t mem;
	size_t size = real_mode_size_needed();

	if (!size)
		return;

	WARN_ON(slab_is_available());

	/* Has to be under 1M so we can execute real-mode AP code. */
	mem = memblock_phys_alloc_range(size, PAGE_SIZE, 0, 1<<20);
	if (!mem)
		pr_info("No sub-1M memory is available for the trampoline\n");
	else
		set_real_mode_mem(mem);

	/*
	 * Unconditionally reserve the entire first 1M, see comment in
	 * setup_arch().
	 */
	memblock_reserve(0, SZ_1M);
}
| 69 | |
/*
 * Propagate AMD memory-encryption state into the trampoline header:
 *  - host memory encryption (SME) active: set TH_FLAGS_SME_ACTIVE so the
 *    trampoline knows about it;
 *  - encrypted guest register state (SEV-ES): make APs enter through
 *    secondary_startup_64_no_verify and set up the AP jump table.
 * Compiled to a no-op without CONFIG_AMD_MEM_ENCRYPT.
 */
static void sme_sev_setup_real_mode(struct trampoline_header *th)
{
#ifdef CONFIG_AMD_MEM_ENCRYPT
	if (cc_platform_has(CC_ATTR_HOST_MEM_ENCRYPT))
		th->flags |= TH_FLAGS_SME_ACTIVE;

	if (cc_platform_has(CC_ATTR_GUEST_STATE_ENCRYPT)) {
		/*
		 * Skip the call to verify_cpu() in secondary_startup_64 as it
		 * will cause #VC exceptions when the AP can't handle them yet.
		 */
		th->start = (u64) secondary_startup_64_no_verify;

		if (sev_es_setup_ap_jump_table(real_mode_header))
			panic("Failed to get/update SEV-ES AP Jump Table");
	}
#endif
}
| 88 | |
/*
 * Copy the real-mode blob into the low memory reserved by
 * reserve_real_mode(), apply the relocations recorded in
 * real_mode_relocs, and fill in the trampoline header fields APs need
 * at boot (entry point, GDT/EFER/CR4 state, and on 64-bit the
 * trampoline page table).
 */
static void __init setup_real_mode(void)
{
	u16 real_mode_seg;
	const u32 *rel;
	u32 count;
	unsigned char *base;
	unsigned long phys_base;
	struct trampoline_header *trampoline_header;
	size_t size = PAGE_ALIGN(real_mode_blob_end - real_mode_blob);
#ifdef CONFIG_X86_64
	u64 *trampoline_pgd;
	u64 efer;
	int i;
#endif

	base = (unsigned char *)real_mode_header;

	/*
	 * If SME is active, the trampoline area will need to be in
	 * decrypted memory in order to bring up other processors
	 * successfully. This is not needed for SEV.
	 */
	if (cc_platform_has(CC_ATTR_HOST_MEM_ENCRYPT))
		set_memory_decrypted((unsigned long)base, size >> PAGE_SHIFT);

	memcpy(base, real_mode_blob, size);

	phys_base = __pa(base);
	real_mode_seg = phys_base >> 4;	/* real-mode segment = phys >> 4 */

	/*
	 * real_mode_relocs holds two tables, each a u32 count followed by
	 * that many u32 offsets into the blob.
	 */
	rel = (u32 *) real_mode_relocs;

	/* 16-bit segment relocations. */
	count = *rel++;
	while (count--) {
		u16 *seg = (u16 *) (base + *rel++);
		*seg = real_mode_seg;
	}

	/* 32-bit linear relocations. */
	count = *rel++;
	while (count--) {
		u32 *ptr = (u32 *) (base + *rel++);
		*ptr += phys_base;
	}

	/* Must be performed *after* relocation. */
	trampoline_header = (struct trampoline_header *)
		__va(real_mode_header->trampoline_header);

#ifdef CONFIG_X86_32
	trampoline_header->start = __pa_symbol(startup_32_smp);
	trampoline_header->gdt_limit = __BOOT_DS + 7;
	trampoline_header->gdt_base = __pa_symbol(boot_gdt);
#else
	/*
	 * Some AMD processors will #GP(0) if EFER.LMA is set in WRMSR
	 * so we need to mask it out.
	 */
	rdmsrl(MSR_EFER, efer);
	trampoline_header->efer = efer & ~EFER_LMA;

	trampoline_header->start = (u64) secondary_startup_64;
	trampoline_cr4_features = &trampoline_header->cr4;
	*trampoline_cr4_features = mmu_cr4_features;

	trampoline_header->flags = 0;

	trampoline_pgd = (u64 *) __va(real_mode_header->trampoline_pgd);

	/* Map the real mode stub as virtual == physical */
	trampoline_pgd[0] = trampoline_pgd_entry.pgd;

	/*
	 * Include the entirety of the kernel mapping into the trampoline
	 * PGD. This way, all mappings present in the normal kernel page
	 * tables are usable while running on trampoline_pgd.
	 */
	for (i = pgd_index(__PAGE_OFFSET); i < PTRS_PER_PGD; i++)
		trampoline_pgd[i] = init_top_pgt[i].pgd;
#endif

	sme_sev_setup_real_mode(trampoline_header);
}
| 173 | |
/*
 * reserve_real_mode() gets called very early, to guarantee the
 * availability of low memory. This is before the proper kernel page
 * tables are set up, so we cannot set page permissions in that
 * function. Also trampoline code will be executed by APs so we
 * need to mark it executable at do_pre_smp_initcalls() at least,
 * thus run it as an early_initcall().
 */
Andy Lutomirski | d0de0f6 | 2016-08-10 02:29:15 -0700 | [diff] [blame] | 182 | static void __init set_real_mode_permissions(void) |
Jarkko Sakkinen | 084ee1c6 | 2012-05-08 21:22:26 +0300 | [diff] [blame] | 183 | { |
Jarkko Sakkinen | b429dbf | 2012-05-08 21:22:41 +0300 | [diff] [blame] | 184 | unsigned char *base = (unsigned char *) real_mode_header; |
| 185 | size_t size = PAGE_ALIGN(real_mode_blob_end - real_mode_blob); |
Jarkko Sakkinen | 084ee1c6 | 2012-05-08 21:22:26 +0300 | [diff] [blame] | 186 | |
Jarkko Sakkinen | f156ffc | 2012-05-08 21:22:30 +0300 | [diff] [blame] | 187 | size_t ro_size = |
Jarkko Sakkinen | b429dbf | 2012-05-08 21:22:41 +0300 | [diff] [blame] | 188 | PAGE_ALIGN(real_mode_header->ro_end) - |
| 189 | __pa(base); |
Jarkko Sakkinen | f156ffc | 2012-05-08 21:22:30 +0300 | [diff] [blame] | 190 | |
| 191 | size_t text_size = |
Jarkko Sakkinen | b429dbf | 2012-05-08 21:22:41 +0300 | [diff] [blame] | 192 | PAGE_ALIGN(real_mode_header->ro_end) - |
| 193 | real_mode_header->text_start; |
Jarkko Sakkinen | f156ffc | 2012-05-08 21:22:30 +0300 | [diff] [blame] | 194 | |
| 195 | unsigned long text_start = |
Jarkko Sakkinen | b429dbf | 2012-05-08 21:22:41 +0300 | [diff] [blame] | 196 | (unsigned long) __va(real_mode_header->text_start); |
Jarkko Sakkinen | f156ffc | 2012-05-08 21:22:30 +0300 | [diff] [blame] | 197 | |
Jarkko Sakkinen | b429dbf | 2012-05-08 21:22:41 +0300 | [diff] [blame] | 198 | set_memory_nx((unsigned long) base, size >> PAGE_SHIFT); |
| 199 | set_memory_ro((unsigned long) base, ro_size >> PAGE_SHIFT); |
Jarkko Sakkinen | f156ffc | 2012-05-08 21:22:30 +0300 | [diff] [blame] | 200 | set_memory_x((unsigned long) text_start, text_size >> PAGE_SHIFT); |
Andy Lutomirski | d0de0f6 | 2016-08-10 02:29:15 -0700 | [diff] [blame] | 201 | } |
| 202 | |
/*
 * Runs as an early_initcall: by now reserve_real_mode() must have
 * allocated the trampoline area and the proper kernel page tables
 * exist, so the blob can be copied/relocated and its page permissions
 * tightened (see comment above set_real_mode_permissions()).
 */
static int __init init_real_mode(void)
{
	if (!real_mode_header)
		panic("Real mode trampoline was not allocated");

	setup_real_mode();
	set_real_mode_permissions();

	return 0;
}
early_initcall(init_real_mode);