Nick Hu | 8ad8b72 | 2020-01-06 10:38:32 -0800 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0 |
| 2 | // Copyright (C) 2019 Andes Technology Corporation |
| 3 | |
| 4 | #include <linux/pfn.h> |
| 5 | #include <linux/init_task.h> |
| 6 | #include <linux/kasan.h> |
| 7 | #include <linux/kernel.h> |
| 8 | #include <linux/memblock.h> |
Mike Rapoport | ca5999f | 2020-06-08 21:32:38 -0700 | [diff] [blame] | 9 | #include <linux/pgtable.h> |
Mike Rapoport | 65fddcf | 2020-06-08 21:32:42 -0700 | [diff] [blame] | 10 | #include <asm/tlbflush.h> |
Nick Hu | 8ad8b72 | 2020-01-06 10:38:32 -0800 | [diff] [blame] | 11 | #include <asm/fixmap.h> |
Nylon Chen | e178d67 | 2021-01-16 13:58:35 +0800 | [diff] [blame] | 12 | #include <asm/pgalloc.h> |
| 13 | |
/* Early page directory set up in head.S, before swapper_pg_dir is live. */
extern pgd_t early_pg_dir[PTRS_PER_PGD];

/*
 * Map the entire KASAN shadow region with a single read-only zero page so
 * that instrumented code can run before the real shadow is allocated.
 * Called from setup very early, before memblock is usable, hence the
 * statically allocated kasan_early_shadow_{pte,pmd,page} tables.
 */
asmlinkage void __init kasan_early_init(void)
{
	uintptr_t i;
	pgd_t *pgd = early_pg_dir + pgd_index(KASAN_SHADOW_START);

	/* Every early-shadow PTE points at the one shared zero page. */
	for (i = 0; i < PTRS_PER_PTE; ++i)
		set_pte(kasan_early_shadow_pte + i,
			mk_pte(virt_to_page(kasan_early_shadow_page),
			       PAGE_KERNEL));

	/* Every early-shadow PMD points at that single PTE table. */
	for (i = 0; i < PTRS_PER_PMD; ++i)
		set_pmd(kasan_early_shadow_pmd + i,
			pfn_pmd(PFN_DOWN
				(__pa((uintptr_t) kasan_early_shadow_pte)),
				__pgprot(_PAGE_TABLE)));

	/* Cover the whole shadow range in early_pg_dir with that PMD table. */
	for (i = KASAN_SHADOW_START; i < KASAN_SHADOW_END;
	     i += PGDIR_SIZE, ++pgd)
		set_pgd(pgd,
			pfn_pgd(PFN_DOWN
				(__pa(((uintptr_t) kasan_early_shadow_pmd))),
				__pgprot(_PAGE_TABLE)));

	/*
	 * init for swapper_pg_dir: mirror the same mapping into the final
	 * kernel page table so the shadow stays mapped after the switch
	 * away from early_pg_dir.
	 */
	pgd = pgd_offset_k(KASAN_SHADOW_START);

	for (i = KASAN_SHADOW_START; i < KASAN_SHADOW_END;
	     i += PGDIR_SIZE, ++pgd)
		set_pgd(pgd,
			pfn_pgd(PFN_DOWN
				(__pa(((uintptr_t) kasan_early_shadow_pmd))),
				__pgprot(_PAGE_TABLE)));

	local_flush_tlb_all();
}
| 50 | |
/*
 * Populate the PTE level of the shadow for [vaddr, end), allocating backing
 * pages from memblock for every not-yet-present entry, then install the PTE
 * table into @pmd (replacing the early shadow mapping if one was there).
 */
static void __init kasan_populate_pte(pmd_t *pmd, unsigned long vaddr, unsigned long end)
{
	phys_addr_t phys_addr;
	pte_t *ptep, *base_pte;

	/*
	 * Reuse an already-installed PTE table, otherwise allocate a fresh
	 * (zeroed) one.
	 * NOTE(review): memblock_alloc()/memblock_phys_alloc() returns are
	 * not checked here — early-boot OOM would dereference/insert a NULL
	 * or zero PFN. Presumably acceptable this early in boot; confirm.
	 */
	if (pmd_none(*pmd))
		base_pte = memblock_alloc(PTRS_PER_PTE * sizeof(pte_t), PAGE_SIZE);
	else
		base_pte = (pte_t *)pmd_page_vaddr(*pmd);

	ptep = base_pte + pte_index(vaddr);

	do {
		/* Only back entries that are still empty; keep existing ones. */
		if (pte_none(*ptep)) {
			phys_addr = memblock_phys_alloc(PAGE_SIZE, PAGE_SIZE);
			set_pte(ptep, pfn_pte(PFN_DOWN(phys_addr), PAGE_KERNEL));
		}
	} while (ptep++, vaddr += PAGE_SIZE, vaddr != end);

	/* Install the fully populated PTE table into the PMD last. */
	set_pmd(pmd, pfn_pmd(PFN_DOWN(__pa(base_pte)), PAGE_TABLE));
}
| 72 | |
/*
 * Populate the PMD level of the shadow for [vaddr, end). Where the range
 * covers a whole, aligned PMD, try to map it with a single huge page;
 * otherwise fall back to populating individual PTEs.
 */
static void __init kasan_populate_pmd(pgd_t *pgd, unsigned long vaddr, unsigned long end)
{
	phys_addr_t phys_addr;
	pmd_t *pmdp, *base_pmd;
	unsigned long next;

	/*
	 * If the PGD still points at the shared early-shadow PMD table,
	 * replace it with a private (zeroed) one we are allowed to modify.
	 */
	base_pmd = (pmd_t *)pgd_page_vaddr(*pgd);
	if (base_pmd == lm_alias(kasan_early_shadow_pmd))
		base_pmd = memblock_alloc(PTRS_PER_PMD * sizeof(pmd_t), PAGE_SIZE);

	pmdp = base_pmd + pmd_index(vaddr);

	do {
		next = pmd_addr_end(vaddr, end);

		/* Whole aligned PMD still empty: try a PMD-sized huge mapping. */
		if (pmd_none(*pmdp) && IS_ALIGNED(vaddr, PMD_SIZE) && (next - vaddr) >= PMD_SIZE) {
			phys_addr = memblock_phys_alloc(PMD_SIZE, PMD_SIZE);
			if (phys_addr) {
				set_pmd(pmdp, pfn_pmd(PFN_DOWN(phys_addr), PAGE_KERNEL));
				continue;
			}
		}

		/* Partial or failed hugepage: populate page by page. */
		kasan_populate_pte(pmdp, vaddr, next);
	} while (pmdp++, vaddr = next, vaddr != end);

	/*
	 * Wait for the whole PGD to be populated before setting the PGD in
	 * the page table, otherwise, if we did set the PGD before populating
	 * it entirely, memblock could allocate a page at a physical address
	 * where KASAN is not populated yet and then we'd get a page fault.
	 */
	set_pgd(pgd, pfn_pgd(PFN_DOWN(__pa(base_pmd)), PAGE_TABLE));
}
| 107 | |
/*
 * Populate the PGD level of the shadow for [vaddr, end). Where the range
 * covers a whole, aligned PGD, try one PGDIR-sized huge mapping before
 * descending to the PMD level.
 */
static void __init kasan_populate_pgd(unsigned long vaddr, unsigned long end)
{
	phys_addr_t phys_addr;
	pgd_t *pgdp = pgd_offset_k(vaddr);
	unsigned long next;

	do {
		next = pgd_addr_end(vaddr, end);

		/*
		 * pgdp can't be none since kasan_early_init initialized all KASAN
		 * shadow region with kasan_early_shadow_pmd: if this is still the case,
		 * that means we can try to allocate a hugepage as a replacement.
		 */
		if (pgd_page_vaddr(*pgdp) == (unsigned long)lm_alias(kasan_early_shadow_pmd) &&
		    IS_ALIGNED(vaddr, PGDIR_SIZE) && (next - vaddr) >= PGDIR_SIZE) {
			phys_addr = memblock_phys_alloc(PGDIR_SIZE, PGDIR_SIZE);
			if (phys_addr) {
				set_pgd(pgdp, pfn_pgd(PFN_DOWN(phys_addr), PAGE_KERNEL));
				continue;
			}
		}

		/* Hugepage not possible: populate this PGD's PMD table. */
		kasan_populate_pmd(pgdp, vaddr, next);
	} while (pgdp++, vaddr = next, vaddr != end);
}
| 134 | |
| 135 | static void __init kasan_populate(void *start, void *end) |
| 136 | { |
Nick Hu | 8ad8b72 | 2020-01-06 10:38:32 -0800 | [diff] [blame] | 137 | unsigned long vaddr = (unsigned long)start & PAGE_MASK; |
| 138 | unsigned long vend = PAGE_ALIGN((unsigned long)end); |
Zong Li | a0a31fd | 2020-02-07 17:52:44 +0800 | [diff] [blame] | 139 | |
Alexandre Ghiti | d127c19 | 2021-02-08 14:30:16 -0500 | [diff] [blame] | 140 | kasan_populate_pgd(vaddr, vend); |
Nick Hu | 8ad8b72 | 2020-01-06 10:38:32 -0800 | [diff] [blame] | 141 | |
Vincent Chen | 4cb699d | 2020-07-10 10:40:54 +0800 | [diff] [blame] | 142 | local_flush_tlb_all(); |
Alexandre Ghiti | 9484e2a | 2021-02-08 14:30:15 -0500 | [diff] [blame] | 143 | memset(start, KASAN_SHADOW_INIT, end - start); |
Nick Hu | 8ad8b72 | 2020-01-06 10:38:32 -0800 | [diff] [blame] | 144 | } |
| 145 | |
/*
 * Shallow-populate the PGD level only for [vaddr, end): give each PGD entry
 * that still points at the shared early-shadow PMD its own zeroed table.
 * Lower levels are left unmapped — used for the vmalloc shadow, which is
 * populated on demand (CONFIG_KASAN_VMALLOC).
 */
static void __init kasan_shallow_populate_pgd(unsigned long vaddr, unsigned long end)
{
	unsigned long next;
	void *p;
	pgd_t *pgd_k = pgd_offset_k(vaddr);

	do {
		next = pgd_addr_end(vaddr, end);
		/* Only replace entries still sharing the early-shadow PMD. */
		if (pgd_page_vaddr(*pgd_k) == (unsigned long)lm_alias(kasan_early_shadow_pmd)) {
			/* memblock_alloc() returns zeroed memory. */
			p = memblock_alloc(PAGE_SIZE, PAGE_SIZE);
			set_pgd(pgd_k, pfn_pgd(PFN_DOWN(__pa(p)), PAGE_TABLE));
		}
	} while (pgd_k++, vaddr = next, vaddr != end);
}
| 160 | |
Palmer Dabbelt | 78947bd | 2021-03-16 22:01:04 -0700 | [diff] [blame] | 161 | static void __init kasan_shallow_populate(void *start, void *end) |
Nylon Chen | e178d67 | 2021-01-16 13:58:35 +0800 | [diff] [blame] | 162 | { |
| 163 | unsigned long vaddr = (unsigned long)start & PAGE_MASK; |
| 164 | unsigned long vend = PAGE_ALIGN((unsigned long)end); |
Nylon Chen | e178d67 | 2021-01-16 13:58:35 +0800 | [diff] [blame] | 165 | |
Alexandre Ghiti | 2da073c | 2021-03-13 03:45:05 -0500 | [diff] [blame] | 166 | kasan_shallow_populate_pgd(vaddr, vend); |
Alexandre Ghiti | f3773dd | 2021-03-13 03:45:04 -0500 | [diff] [blame] | 167 | local_flush_tlb_all(); |
Nick Hu | 8ad8b72 | 2020-01-06 10:38:32 -0800 | [diff] [blame] | 168 | } |
| 169 | |
/*
 * Main KASAN initialization: replace the early catch-all shadow with real
 * mappings for the regions the kernel actually uses, then remap the early
 * shadow page read-only for everything else.
 */
void __init kasan_init(void)
{
	phys_addr_t p_start, p_end;
	u64 i;

	/*
	 * Populate all kernel virtual address space with kasan_early_shadow_page
	 * except for the linear mapping and the modules/kernel/BPF mapping.
	 */
	kasan_populate_early_shadow((void *)KASAN_SHADOW_START,
				    (void *)kasan_mem_to_shadow((void *)
								VMEMMAP_END));
	/* vmalloc shadow: shallow tables only when KASAN_VMALLOC fills in PTEs. */
	if (IS_ENABLED(CONFIG_KASAN_VMALLOC))
		kasan_shallow_populate(
			(void *)kasan_mem_to_shadow((void *)VMALLOC_START),
			(void *)kasan_mem_to_shadow((void *)VMALLOC_END));
	else
		kasan_populate_early_shadow(
			(void *)kasan_mem_to_shadow((void *)VMALLOC_START),
			(void *)kasan_mem_to_shadow((void *)VMALLOC_END));

	/* Populate the linear mapping */
	for_each_mem_range(i, &p_start, &p_end) {
		void *start = (void *)__va(p_start);
		void *end = (void *)__va(p_end);

		/* Memblock ranges are ascending; an inverted range ends the walk. */
		if (start >= end)
			break;

		kasan_populate(kasan_mem_to_shadow(start), kasan_mem_to_shadow(end));
	}

	/* Populate kernel, BPF, modules mapping */
	kasan_populate(kasan_mem_to_shadow((const void *)MODULES_VADDR),
		       kasan_mem_to_shadow((const void *)MODULES_VADDR + SZ_2G));

	/*
	 * Remap the shared early-shadow page read-only so stray writes to
	 * unpopulated shadow fault instead of silently corrupting it.
	 */
	for (i = 0; i < PTRS_PER_PTE; i++)
		set_pte(&kasan_early_shadow_pte[i],
			mk_pte(virt_to_page(kasan_early_shadow_page),
			       __pgprot(_PAGE_PRESENT | _PAGE_READ |
					_PAGE_ACCESSED)));

	memset(kasan_early_shadow_page, KASAN_SHADOW_INIT, PAGE_SIZE);
	/* Depth 0 enables KASAN reporting from here on. */
	init_task.kasan_depth = 0;
}