#include <linux/io.h>
#include <linux/memblock.h>

#include <asm/cacheflush.h>
#include <asm/pgtable.h>
#include <asm/realmode.h>
#include <asm/tlbflush.h>

struct real_mode_header *real_mode_header;
u32 *trampoline_cr4_features;

/* Hold the pgd entry used on booting additional CPUs */
pgd_t trampoline_pgd_entry;

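/*
 * Reserve a page-aligned block below 1 MiB for the real-mode trampoline
 * blob; it has to sit in low memory because the APs start executing it
 * in real mode.
 */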
void __init reserve_real_mode(void)
{
	phys_addr_t mem;
	unsigned char *base;
	size_t size = PAGE_ALIGN(real_mode_blob_end - real_mode_blob);

	/* Has to be under 1M so we can execute real-mode AP code. */
	mem = memblock_find_in_range(0, 1<<20, size, PAGE_SIZE);
	if (!mem)
		panic("Cannot allocate trampoline\n");

	base = __va(mem);
	memblock_reserve(mem, size);
	real_mode_header = (struct real_mode_header *) base;
	printk(KERN_DEBUG "Base memory trampoline at [%p] %llx size %zu\n",
	       base, (unsigned long long)mem, size);
}

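/*
 * Copy the real-mode blob into the area reserved above, patch in its
 * relocations and fill in the trampoline header consumed by the AP
 * startup code.
 */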
static void __init setup_real_mode(void)
{
	u16 real_mode_seg;
	const u32 *rel;
	u32 count;
	unsigned char *base;
	unsigned long phys_base;
	struct trampoline_header *trampoline_header;
	size_t size = PAGE_ALIGN(real_mode_blob_end - real_mode_blob);
#ifdef CONFIG_X86_64
	u64 *trampoline_pgd;
	u64 efer;
#endif

	base = (unsigned char *)real_mode_header;

	memcpy(base, real_mode_blob, size);

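	/*
	 * In real mode a segment value is simply the physical base address
	 * shifted right by four bits.
	 */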
	phys_base = __pa(base);
	real_mode_seg = phys_base >> 4;

	rel = (u32 *) real_mode_relocs;

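	/*
	 * Each relocation table begins with an entry count, followed by
	 * that many offsets into the blob that need patching.
	 */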
	/* 16-bit segment relocations. */
	count = *rel++;
	while (count--) {
		u16 *seg = (u16 *) (base + *rel++);
		*seg = real_mode_seg;
	}

	/* 32-bit linear relocations. */
	count = *rel++;
	while (count--) {
		u32 *ptr = (u32 *) (base + *rel++);
		*ptr += phys_base;
	}

	/* Must be performed *after* relocation. */
	trampoline_header = (struct trampoline_header *)
		__va(real_mode_header->trampoline_header);

#ifdef CONFIG_X86_32
	trampoline_header->start = __pa_symbol(startup_32_smp);
	trampoline_header->gdt_limit = __BOOT_DS + 7;
	trampoline_header->gdt_base = __pa_symbol(boot_gdt);
#else
	/*
	 * Some AMD processors will #GP(0) if EFER.LMA is set in WRMSR
	 * so we need to mask it out.
	 */
	rdmsrl(MSR_EFER, efer);
	trampoline_header->efer = efer & ~EFER_LMA;

	trampoline_header->start = (u64) secondary_startup_64;
	trampoline_cr4_features = &trampoline_header->cr4;
	*trampoline_cr4_features = mmu_cr4_features;

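	/*
	 * Populate the trampoline page table: entry 0 gives the AP an
	 * identity mapping of low memory while it brings up paging, and
	 * entry 511 is copied from init_level4_pgt so the kernel mappings
	 * at the top of the address space are present as well.
	 */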
	trampoline_pgd = (u64 *) __va(real_mode_header->trampoline_pgd);
	trampoline_pgd[0] = trampoline_pgd_entry.pgd;
	trampoline_pgd[511] = init_level4_pgt[511].pgd;
#endif
}

/*
 * reserve_real_mode() gets called very early, to guarantee the
 * availability of low memory. This is before the proper kernel page
 * tables are set up, so we cannot set page permissions in that
 * function. The trampoline code is also executed by the APs, so it
 * must be marked executable no later than do_pre_smp_initcalls();
 * hence the rest of the setup runs as an early_initcall().
 */
static void __init set_real_mode_permissions(void)
{
	unsigned char *base = (unsigned char *) real_mode_header;
	size_t size = PAGE_ALIGN(real_mode_blob_end - real_mode_blob);

	size_t ro_size =
		PAGE_ALIGN(real_mode_header->ro_end) -
		__pa(base);

	size_t text_size =
		PAGE_ALIGN(real_mode_header->ro_end) -
		real_mode_header->text_start;

	unsigned long text_start =
		(unsigned long) __va(real_mode_header->text_start);

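	/*
	 * Start with the whole blob non-executable, make everything up to
	 * ro_end read-only, then re-enable execution for the text region
	 * only.
	 */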
	set_memory_nx((unsigned long) base, size >> PAGE_SHIFT);
	set_memory_ro((unsigned long) base, ro_size >> PAGE_SHIFT);
	set_memory_x((unsigned long) text_start, text_size >> PAGE_SHIFT);
}

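/*
 * Finish the trampoline setup and apply the final page permissions once
 * reserve_real_mode() has provided the low-memory area; panic if that
 * reservation never happened.
 */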
static int __init init_real_mode(void)
{
	if (!real_mode_header)
		panic("Real mode trampoline was not allocated");

	setup_real_mode();
	set_real_mode_permissions();

	return 0;
}
early_initcall(init_real_mode);