// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2019 Andes Technology Corporation

#include <linux/pfn.h>
#include <linux/init_task.h>
#include <linux/kasan.h>
#include <linux/kernel.h>
#include <linux/memblock.h>
#include <linux/pgtable.h>
#include <asm/tlbflush.h>
#include <asm/fixmap.h>
#include <asm/pgalloc.h>

/*
 * The KASAN shadow region must lie at a fixed address across sv39, sv48 and
 * sv57, right before the kernel.
 *
 * For sv39, the region is aligned on PGDIR_SIZE so we only need to populate
 * the page global directory with kasan_early_shadow_pmd.
 *
 * For sv48 and sv57, the region is not aligned on PGDIR_SIZE so the mapping
 * must be divided as follows:
 * - the first PGD entry, although incomplete, is populated with
 *   kasan_early_shadow_pud/p4d
 * - the PGD entries in the middle are populated with kasan_early_shadow_pud/p4d
 * - the last PGD entry is shared with the kernel mapping, so it is populated
 *   at the lower pud/p4d levels
 *
 * In addition, when shallow populating a kasan region (for example vmalloc),
 * this region may not be aligned on PGDIR_SIZE either, so we must go down to
 * the pud level too.
 */
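
/*
 * Reminder (generic KASAN): each byte of shadow memory covers
 * 1 << KASAN_SHADOW_SCALE_SHIFT == 8 bytes of address space, roughly:
 *
 *	shadow(addr) = (addr >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
 *
 * (see kasan_mem_to_shadow()). This is why kasan_early_init() below checks
 * that KASAN_SHADOW_OFFSET == KASAN_SHADOW_END - (1UL << (64 - KASAN_SHADOW_SCALE_SHIFT)):
 * the shadow of the whole 64-bit address space must end exactly at
 * KASAN_SHADOW_END.
 */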

extern pgd_t early_pg_dir[PTRS_PER_PGD];

static void __init kasan_populate_pte(pmd_t *pmd, unsigned long vaddr, unsigned long end)
{
	phys_addr_t phys_addr;
	pte_t *ptep, *base_pte;

	if (pmd_none(*pmd))
		base_pte = memblock_alloc(PTRS_PER_PTE * sizeof(pte_t), PAGE_SIZE);
	else
		base_pte = (pte_t *)pmd_page_vaddr(*pmd);

	ptep = base_pte + pte_index(vaddr);

	do {
		if (pte_none(*ptep)) {
			phys_addr = memblock_phys_alloc(PAGE_SIZE, PAGE_SIZE);
			set_pte(ptep, pfn_pte(PFN_DOWN(phys_addr), PAGE_KERNEL));
		}
	} while (ptep++, vaddr += PAGE_SIZE, vaddr != end);

	set_pmd(pmd, pfn_pmd(PFN_DOWN(__pa(base_pte)), PAGE_TABLE));
}

static void __init kasan_populate_pmd(pud_t *pud, unsigned long vaddr, unsigned long end)
{
	phys_addr_t phys_addr;
	pmd_t *pmdp, *base_pmd;
	unsigned long next;

	if (pud_none(*pud)) {
		base_pmd = memblock_alloc(PTRS_PER_PMD * sizeof(pmd_t), PAGE_SIZE);
	} else {
		base_pmd = (pmd_t *)pud_pgtable(*pud);
		if (base_pmd == lm_alias(kasan_early_shadow_pmd))
			base_pmd = memblock_alloc(PTRS_PER_PMD * sizeof(pmd_t), PAGE_SIZE);
	}

	pmdp = base_pmd + pmd_index(vaddr);

	do {
		next = pmd_addr_end(vaddr, end);

		if (pmd_none(*pmdp) && IS_ALIGNED(vaddr, PMD_SIZE) && (next - vaddr) >= PMD_SIZE) {
			phys_addr = memblock_phys_alloc(PMD_SIZE, PMD_SIZE);
			if (phys_addr) {
				set_pmd(pmdp, pfn_pmd(PFN_DOWN(phys_addr), PAGE_KERNEL));
				continue;
			}
		}

		kasan_populate_pte(pmdp, vaddr, next);
	} while (pmdp++, vaddr = next, vaddr != end);

	/*
	 * Wait for the whole PMD table to be populated before setting the PUD
	 * entry in the page table, otherwise, if we set the PUD before
	 * populating the PMD entirely, memblock could allocate a page at a
	 * physical address where KASAN is not populated yet and then we'd get
	 * a page fault.
	 */
	set_pud(pud, pfn_pud(PFN_DOWN(__pa(base_pmd)), PAGE_TABLE));
}

static void __init kasan_populate_pud(pgd_t *pgd,
				      unsigned long vaddr, unsigned long end,
				      bool early)
{
	phys_addr_t phys_addr;
	pud_t *pudp, *base_pud;
	unsigned long next;

	if (early) {
		/*
		 * We can't use pgd_page_vaddr() here as it would return a
		 * linear mapping address, but the linear mapping is not set up
		 * yet: when populating early_pg_dir we need the physical
		 * address, and when populating swapper_pg_dir we need the
		 * kernel virtual address, so use the pt_ops facility instead.
		 */
		base_pud = pt_ops.get_pud_virt(pfn_to_phys(_pgd_pfn(*pgd)));
	} else {
		base_pud = (pud_t *)pgd_page_vaddr(*pgd);
		if (base_pud == lm_alias(kasan_early_shadow_pud))
			base_pud = memblock_alloc(PTRS_PER_PUD * sizeof(pud_t), PAGE_SIZE);
	}

	pudp = base_pud + pud_index(vaddr);

	do {
		next = pud_addr_end(vaddr, end);

		if (pud_none(*pudp) && IS_ALIGNED(vaddr, PUD_SIZE) && (next - vaddr) >= PUD_SIZE) {
			if (early) {
				phys_addr = __pa(((uintptr_t)kasan_early_shadow_pmd));
				set_pud(pudp, pfn_pud(PFN_DOWN(phys_addr), PAGE_TABLE));
				continue;
			} else {
				phys_addr = memblock_phys_alloc(PUD_SIZE, PUD_SIZE);
				if (phys_addr) {
					set_pud(pudp, pfn_pud(PFN_DOWN(phys_addr), PAGE_KERNEL));
					continue;
				}
			}
		}

		kasan_populate_pmd(pudp, vaddr, next);
	} while (pudp++, vaddr = next, vaddr != end);

	/*
	 * Wait for the whole PUD table to be populated before setting the PGD
	 * entry in the page table, otherwise, if we set the PGD before
	 * populating the PUD entirely, memblock could allocate a page at a
	 * physical address where KASAN is not populated yet and then we'd get
	 * a page fault.
	 */
	if (!early)
		set_pgd(pgd, pfn_pgd(PFN_DOWN(__pa(base_pud)), PAGE_TABLE));
}

#define kasan_early_shadow_pgd_next			(pgtable_l4_enabled ?	\
				(uintptr_t)kasan_early_shadow_pud :		\
				(uintptr_t)kasan_early_shadow_pmd)
#define kasan_populate_pgd_next(pgdp, vaddr, next, early)			\
		(pgtable_l4_enabled ?						\
			kasan_populate_pud(pgdp, vaddr, next, early) :		\
			kasan_populate_pmd((pud_t *)pgdp, vaddr, next))
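
/*
 * With pgtable_l4_enabled (sv48 and above), a PGD entry points to a PUD table,
 * so the helpers above descend through kasan_populate_pud(). With sv39, the
 * pud/p4d levels are folded into the PGD, so a PGD entry effectively
 * references a PMD table and the pgd pointer can be handed to
 * kasan_populate_pmd() as a pud_t *.
 */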

static void __init kasan_populate_pgd(pgd_t *pgdp,
				      unsigned long vaddr, unsigned long end,
				      bool early)
{
	phys_addr_t phys_addr;
	unsigned long next;

	do {
		next = pgd_addr_end(vaddr, end);

		if (IS_ALIGNED(vaddr, PGDIR_SIZE) && (next - vaddr) >= PGDIR_SIZE) {
			if (early) {
				phys_addr = __pa((uintptr_t)kasan_early_shadow_pgd_next);
				set_pgd(pgdp, pfn_pgd(PFN_DOWN(phys_addr), PAGE_TABLE));
				continue;
			} else if (pgd_page_vaddr(*pgdp) ==
				   (unsigned long)lm_alias(kasan_early_shadow_pgd_next)) {
				/*
				 * pgdp can't be none since kasan_early_init()
				 * initialized the whole KASAN shadow region
				 * with kasan_early_shadow_pud: if that entry
				 * is still in place, we can try to allocate a
				 * hugepage as a replacement.
				 */
				phys_addr = memblock_phys_alloc(PGDIR_SIZE, PGDIR_SIZE);
				if (phys_addr) {
					set_pgd(pgdp, pfn_pgd(PFN_DOWN(phys_addr), PAGE_KERNEL));
					continue;
				}
			}
		}

		kasan_populate_pgd_next(pgdp, vaddr, next, early);
	} while (pgdp++, vaddr = next, vaddr != end);
}

asmlinkage void __init kasan_early_init(void)
{
	uintptr_t i;

	BUILD_BUG_ON(KASAN_SHADOW_OFFSET !=
		KASAN_SHADOW_END - (1UL << (64 - KASAN_SHADOW_SCALE_SHIFT)));

	for (i = 0; i < PTRS_PER_PTE; ++i)
		set_pte(kasan_early_shadow_pte + i,
			mk_pte(virt_to_page(kasan_early_shadow_page),
			       PAGE_KERNEL));

	for (i = 0; i < PTRS_PER_PMD; ++i)
		set_pmd(kasan_early_shadow_pmd + i,
			pfn_pmd(PFN_DOWN
				(__pa((uintptr_t)kasan_early_shadow_pte)),
				PAGE_TABLE));

	if (pgtable_l4_enabled) {
		for (i = 0; i < PTRS_PER_PUD; ++i)
			set_pud(kasan_early_shadow_pud + i,
				pfn_pud(PFN_DOWN
					(__pa(((uintptr_t)kasan_early_shadow_pmd))),
					PAGE_TABLE));
	}

	kasan_populate_pgd(early_pg_dir + pgd_index(KASAN_SHADOW_START),
			   KASAN_SHADOW_START, KASAN_SHADOW_END, true);

	local_flush_tlb_all();
}

void __init kasan_swapper_init(void)
{
	kasan_populate_pgd(pgd_offset_k(KASAN_SHADOW_START),
			   KASAN_SHADOW_START, KASAN_SHADOW_END, true);

	local_flush_tlb_all();
}

static void __init kasan_populate(void *start, void *end)
{
	unsigned long vaddr = (unsigned long)start & PAGE_MASK;
	unsigned long vend = PAGE_ALIGN((unsigned long)end);

	kasan_populate_pgd(pgd_offset_k(vaddr), vaddr, vend, false);

	local_flush_tlb_all();
	memset(start, KASAN_SHADOW_INIT, end - start);
}

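/*
 * "Shallow" population only builds the upper levels of the shadow page tables
 * for the vmalloc shadow region: the leaf shadow pages are expected to be
 * mapped on demand by the core KASAN vmalloc hooks (CONFIG_KASAN_VMALLOC) when
 * a vmalloc area is actually allocated, so all we must guarantee here is that
 * those upper levels are not the shared early shadow tables.
 */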
static void __init kasan_shallow_populate_pud(pgd_t *pgdp,
					      unsigned long vaddr, unsigned long end,
					      bool kasan_populate)
{
	unsigned long next;
	pud_t *pudp, *base_pud;
	pmd_t *base_pmd;
	bool is_kasan_pmd;

	base_pud = (pud_t *)pgd_page_vaddr(*pgdp);
	pudp = base_pud + pud_index(vaddr);

	if (kasan_populate)
		memcpy(base_pud, (void *)kasan_early_shadow_pgd_next,
		       sizeof(pud_t) * PTRS_PER_PUD);

	do {
		next = pud_addr_end(vaddr, end);
		is_kasan_pmd = (pud_pgtable(*pudp) == lm_alias(kasan_early_shadow_pmd));

		if (is_kasan_pmd) {
			base_pmd = memblock_alloc(PAGE_SIZE, PAGE_SIZE);
			set_pud(pudp, pfn_pud(PFN_DOWN(__pa(base_pmd)), PAGE_TABLE));
		}
	} while (pudp++, vaddr = next, vaddr != end);
}

static void __init kasan_shallow_populate_pgd(unsigned long vaddr, unsigned long end)
{
	unsigned long next;
	void *p;
	pgd_t *pgd_k = pgd_offset_k(vaddr);
	bool is_kasan_pgd_next;

	do {
		next = pgd_addr_end(vaddr, end);
		is_kasan_pgd_next = (pgd_page_vaddr(*pgd_k) ==
				     (unsigned long)lm_alias(kasan_early_shadow_pgd_next));

		if (is_kasan_pgd_next) {
			p = memblock_alloc(PAGE_SIZE, PAGE_SIZE);
			set_pgd(pgd_k, pfn_pgd(PFN_DOWN(__pa(p)), PAGE_TABLE));
		}

		if (IS_ALIGNED(vaddr, PGDIR_SIZE) && (next - vaddr) >= PGDIR_SIZE)
			continue;

		kasan_shallow_populate_pud(pgd_k, vaddr, next, is_kasan_pgd_next);
	} while (pgd_k++, vaddr = next, vaddr != end);
}

static void __init kasan_shallow_populate(void *start, void *end)
{
	unsigned long vaddr = (unsigned long)start & PAGE_MASK;
	unsigned long vend = PAGE_ALIGN((unsigned long)end);

	kasan_shallow_populate_pgd(vaddr, vend);
	local_flush_tlb_all();
}

void __init kasan_init(void)
{
	phys_addr_t p_start, p_end;
	u64 i;

	if (IS_ENABLED(CONFIG_KASAN_VMALLOC))
		kasan_shallow_populate(
			(void *)kasan_mem_to_shadow((void *)VMALLOC_START),
			(void *)kasan_mem_to_shadow((void *)VMALLOC_END));

	/* Populate the linear mapping */
	for_each_mem_range(i, &p_start, &p_end) {
		void *start = (void *)__va(p_start);
		void *end = (void *)__va(p_end);

		if (start >= end)
			break;

		kasan_populate(kasan_mem_to_shadow(start), kasan_mem_to_shadow(end));
	}

	/* Populate kernel, BPF, modules mapping */
	kasan_populate(kasan_mem_to_shadow((const void *)MODULES_VADDR),
		       kasan_mem_to_shadow((const void *)MODULES_VADDR + SZ_2G));

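	/*
	 * Finally, switch the early shadow page to a read-only mapping: it
	 * remains the backing shadow wherever the shadow has only been
	 * shallow-populated (e.g. vmalloc), so it must not be writable.
	 */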
	for (i = 0; i < PTRS_PER_PTE; i++)
		set_pte(&kasan_early_shadow_pte[i],
			mk_pte(virt_to_page(kasan_early_shadow_page),
			       __pgprot(_PAGE_PRESENT | _PAGE_READ |
					_PAGE_ACCESSED)));

	memset(kasan_early_shadow_page, KASAN_SHADOW_INIT, PAGE_SIZE);
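
	/* The shadow is now fully set up: allow KASAN reports from here on. */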
	init_task.kasan_depth = 0;
}