// SPDX-License-Identifier: GPL-2.0
#include <linux/kasan.h>
#include <linux/sched/task.h>
#include <linux/memblock.h>
#include <linux/pgtable.h>
#include <asm/pgalloc.h>
#include <asm/kasan.h>
#include <asm/mem_detect.h>
#include <asm/processor.h>
#include <asm/sclp.h>
#include <asm/facility.h>
#include <asm/sections.h>
#include <asm/setup.h>
#include <asm/uv.h>

static unsigned long segment_pos __initdata;
static unsigned long segment_low __initdata;
static unsigned long pgalloc_pos __initdata;
static unsigned long pgalloc_low __initdata;
static unsigned long pgalloc_freeable __initdata;
static bool has_edat __initdata;
static bool has_nx __initdata;

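/*
 * Generic KASAN tracks each 8 bytes of kernel address space with one shadow
 * byte (KASAN_SHADOW_SCALE_SHIFT == 3); __sha() translates an address into
 * the address of its shadow byte.
 */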
#define __sha(x) ((unsigned long)kasan_mem_to_shadow((void *)x))

static pgd_t early_pg_dir[PTRS_PER_PGD] __initdata __aligned(PAGE_SIZE);

static void __init kasan_early_panic(const char *reason)
{
	sclp_early_printk("The Linux kernel failed to boot with the KernelAddressSanitizer:\n");
	sclp_early_printk(reason);
	disabled_wait();
}

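/*
 * Early shadow memory is carved out of the top of physical memory by two
 * simple bump allocators that only grow downwards: kasan_early_alloc_segment()
 * hands out 1MB segments (used when EDAT is available), while
 * kasan_early_alloc_pages() hands out page-sized chunks, e.g. for page
 * tables. Hitting the respective low watermark is fatal.
 */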
static void * __init kasan_early_alloc_segment(void)
{
	segment_pos -= _SEGMENT_SIZE;

	if (segment_pos < segment_low)
		kasan_early_panic("out of memory during initialisation\n");

	return (void *)segment_pos;
}

static void * __init kasan_early_alloc_pages(unsigned int order)
{
	pgalloc_pos -= (PAGE_SIZE << order);

	if (pgalloc_pos < pgalloc_low)
		kasan_early_panic("out of memory during initialisation\n");

	return (void *)pgalloc_pos;
}

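/*
 * A region or segment table (CRST) occupies four pages (CRST_ALLOC_ORDER):
 * 2048 entries of 8 bytes each. All entries are preset to the "empty" value
 * passed in by the caller.
 */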
static void * __init kasan_early_crst_alloc(unsigned long val)
{
	unsigned long *table;

	table = kasan_early_alloc_pages(CRST_ALLOC_ORDER);
	if (table)
		crst_table_init(table, val);
	return table;
}

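/*
 * An s390 page table (_PAGE_TABLE_SIZE) fills only half a page, so each
 * freshly allocated page is split in two: one half is handed out right
 * away, the other is stashed in pte_leftover for the next call.
 */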
static pte_t * __init kasan_early_pte_alloc(void)
{
	static void *pte_leftover;
	pte_t *pte;

	BUILD_BUG_ON(_PAGE_TABLE_SIZE * 2 != PAGE_SIZE);

	if (!pte_leftover) {
		pte_leftover = kasan_early_alloc_pages(0);
		pte = pte_leftover + _PAGE_TABLE_SIZE;
	} else {
		pte = pte_leftover;
		pte_leftover = NULL;
	}
	memset64((u64 *)pte, _PAGE_INVALID, PTRS_PER_PTE);
	return pte;
}

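/*
 * POPULATE_ONE2ONE     - install an identity (1:1) mapping for the range
 * POPULATE_MAP         - back the range with freshly allocated zeroed pages
 * POPULATE_ZERO_SHADOW - map the range read-only to the shared zero shadow
 *                        page, for memory that kasan does not track
 * POPULATE_SHALLOW     - populate the top level only; lower levels are
 *                        filled on demand later (KASAN_VMALLOC)
 */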
enum populate_mode {
	POPULATE_ONE2ONE,
	POPULATE_MAP,
	POPULATE_ZERO_SHADOW,
	POPULATE_SHALLOW
};
static void __init kasan_early_pgtable_populate(unsigned long address,
						unsigned long end,
						enum populate_mode mode)
{
	unsigned long pgt_prot_zero, pgt_prot, sgt_prot;
	pgd_t *pg_dir;
	p4d_t *p4_dir;
	pud_t *pu_dir;
	pmd_t *pm_dir;
	pte_t *pt_dir;

	pgt_prot_zero = pgprot_val(PAGE_KERNEL_RO);
	if (!has_nx)
		pgt_prot_zero &= ~_PAGE_NOEXEC;
	pgt_prot = pgprot_val(PAGE_KERNEL);
	sgt_prot = pgprot_val(SEGMENT_KERNEL);
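	/*
	 * _PAGE_NOEXEC must be dropped when the NX facility is not installed,
	 * and for the 1:1 mapping, which the kernel still executes from at
	 * this point in boot.
	 */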
	if (!has_nx || mode == POPULATE_ONE2ONE) {
		pgt_prot &= ~_PAGE_NOEXEC;
		sgt_prot &= ~_SEGMENT_ENTRY_NOEXEC;
	}

	/*
	 * The first 1MB of 1:1 mapping is mapped with 4KB pages
	 */
	while (address < end) {
		pg_dir = pgd_offset_k(address);
		if (pgd_none(*pg_dir)) {
			if (mode == POPULATE_ZERO_SHADOW &&
			    IS_ALIGNED(address, PGDIR_SIZE) &&
			    end - address >= PGDIR_SIZE) {
				pgd_populate(&init_mm, pg_dir,
						kasan_early_shadow_p4d);
				address = (address + PGDIR_SIZE) & PGDIR_MASK;
				continue;
			}
			p4_dir = kasan_early_crst_alloc(_REGION2_ENTRY_EMPTY);
			pgd_populate(&init_mm, pg_dir, p4_dir);
		}

		if (mode == POPULATE_SHALLOW) {
			address = (address + P4D_SIZE) & P4D_MASK;
			continue;
		}

		p4_dir = p4d_offset(pg_dir, address);
		if (p4d_none(*p4_dir)) {
			if (mode == POPULATE_ZERO_SHADOW &&
			    IS_ALIGNED(address, P4D_SIZE) &&
			    end - address >= P4D_SIZE) {
				p4d_populate(&init_mm, p4_dir,
						kasan_early_shadow_pud);
				address = (address + P4D_SIZE) & P4D_MASK;
				continue;
			}
			pu_dir = kasan_early_crst_alloc(_REGION3_ENTRY_EMPTY);
			p4d_populate(&init_mm, p4_dir, pu_dir);
		}

		pu_dir = pud_offset(p4_dir, address);
		if (pud_none(*pu_dir)) {
			if (mode == POPULATE_ZERO_SHADOW &&
			    IS_ALIGNED(address, PUD_SIZE) &&
			    end - address >= PUD_SIZE) {
				pud_populate(&init_mm, pu_dir,
						kasan_early_shadow_pmd);
				address = (address + PUD_SIZE) & PUD_MASK;
				continue;
			}
			pm_dir = kasan_early_crst_alloc(_SEGMENT_ENTRY_EMPTY);
			pud_populate(&init_mm, pu_dir, pm_dir);
		}

		pm_dir = pmd_offset(pu_dir, address);
		if (pmd_none(*pm_dir)) {
			if (IS_ALIGNED(address, PMD_SIZE) &&
			    end - address >= PMD_SIZE) {
				if (mode == POPULATE_ZERO_SHADOW) {
					pmd_populate(&init_mm, pm_dir, kasan_early_shadow_pte);
					address = (address + PMD_SIZE) & PMD_MASK;
					continue;
				} else if (has_edat && address) {
					void *page;

					if (mode == POPULATE_ONE2ONE) {
						page = (void *)address;
					} else {
						page = kasan_early_alloc_segment();
						memset(page, 0, _SEGMENT_SIZE);
					}
					pmd_val(*pm_dir) = __pa(page) | sgt_prot;
					address = (address + PMD_SIZE) & PMD_MASK;
					continue;
				}
			}
			pt_dir = kasan_early_pte_alloc();
			pmd_populate(&init_mm, pm_dir, pt_dir);
		} else if (pmd_large(*pm_dir)) {
			address = (address + PMD_SIZE) & PMD_MASK;
			continue;
		}

		pt_dir = pte_offset_kernel(pm_dir, address);
		if (pte_none(*pt_dir)) {
			void *page;

			switch (mode) {
			case POPULATE_ONE2ONE:
				page = (void *)address;
				pte_val(*pt_dir) = __pa(page) | pgt_prot;
				break;
			case POPULATE_MAP:
				page = kasan_early_alloc_pages(0);
				memset(page, 0, PAGE_SIZE);
				pte_val(*pt_dir) = __pa(page) | pgt_prot;
				break;
			case POPULATE_ZERO_SHADOW:
				page = kasan_early_shadow_page;
				pte_val(*pt_dir) = __pa(page) | pgt_prot_zero;
				break;
			case POPULATE_SHALLOW:
				/* should never happen */
				break;
			}
		}
		address += PAGE_SIZE;
	}
}

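/*
 * Install the given top-level table as the kernel ASCE in the primary (CR1),
 * secondary (CR7) and home (CR13) address space control registers, so that
 * every translation mode uses it from now on.
 */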
static void __init kasan_set_pgd(pgd_t *pgd, unsigned long asce_type)
{
	unsigned long asce_bits;

	asce_bits = asce_type | _ASCE_TABLE_LENGTH;
	S390_lowcore.kernel_asce = (__pa(pgd) & PAGE_MASK) | asce_bits;
	S390_lowcore.user_asce = S390_lowcore.kernel_asce;

	__ctl_load(S390_lowcore.kernel_asce, 1, 1);
	__ctl_load(S390_lowcore.kernel_asce, 7, 7);
	__ctl_load(S390_lowcore.kernel_asce, 13, 13);
}

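/*
 * Turn on DAT in the PSW and switch to the home address space, which is
 * where the kernel and the kasan shadow live.
 */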
static void __init kasan_enable_dat(void)
{
	psw_t psw;

	psw.mask = __extract_psw();
	psw_bits(psw).dat = 1;
	psw_bits(psw).as = PSW_BITS_AS_HOME;
	__load_psw_mask(psw.mask);
}

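/*
 * Facility 8 is EDAT-1 (1MB frames at the segment level), enabled via
 * control register 0 bit 23; facility 130 is instruction-execution
 * protection, enabled via control register 0 bit 20.
 */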
static void __init kasan_early_detect_facilities(void)
{
	if (test_facility(8)) {
		has_edat = true;
		__ctl_set_bit(0, 23);
	}
	if (!noexec_disabled && test_facility(130)) {
		has_nx = true;
		__ctl_set_bit(0, 20);
	}
}

void __init kasan_early_init(void)
{
	unsigned long shadow_alloc_size;
	unsigned long initrd_end;
	unsigned long memsize;
	unsigned long pgt_prot = pgprot_val(PAGE_KERNEL_RO);
	pte_t pte_z;
	pmd_t pmd_z = __pmd(__pa(kasan_early_shadow_pte) | _SEGMENT_ENTRY);
	pud_t pud_z = __pud(__pa(kasan_early_shadow_pmd) | _REGION3_ENTRY);
	p4d_t p4d_z = __p4d(__pa(kasan_early_shadow_pud) | _REGION2_ENTRY);

	kasan_early_detect_facilities();
	if (!has_nx)
		pgt_prot &= ~_PAGE_NOEXEC;
	pte_z = __pte(__pa(kasan_early_shadow_page) | pgt_prot);

	memsize = get_mem_detect_end();
	if (!memsize)
		kasan_early_panic("cannot detect physical memory size\n");
	/*
	 * Kasan currently supports standby memory, but only if it follows
	 * online memory (default allocation), i.e. there are no memory holes.
	 * - memsize represents the end of online memory
	 * - ident_map_size represents online + standby memory, with memory
	 *   limits taken into account.
	 * Kasan maps "memsize" right away:
	 * [0, memsize]			- as identity mapping
	 * [__sha(0), __sha(memsize)]	- shadow memory for identity mapping
	 * The remainder [memsize, ident_map_size], if memsize < ident_map_size,
	 * can be mapped/unmapped dynamically later during memory hotplug.
	 */
	memsize = min(memsize, ident_map_size);

	BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_START, P4D_SIZE));
	BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_END, P4D_SIZE));
	crst_table_init((unsigned long *)early_pg_dir, _REGION2_ENTRY_EMPTY);

	/* init kasan zero shadow */
	crst_table_init((unsigned long *)kasan_early_shadow_p4d,
			p4d_val(p4d_z));
	crst_table_init((unsigned long *)kasan_early_shadow_pud,
			pud_val(pud_z));
	crst_table_init((unsigned long *)kasan_early_shadow_pmd,
			pmd_val(pmd_z));
	memset64((u64 *)kasan_early_shadow_pte, pte_val(pte_z), PTRS_PER_PTE);

	shadow_alloc_size = memsize >> KASAN_SHADOW_SCALE_SHIFT;
	pgalloc_low = round_up((unsigned long)_end, _SEGMENT_SIZE);
	if (IS_ENABLED(CONFIG_BLK_DEV_INITRD)) {
		initrd_end =
		    round_up(initrd_data.start + initrd_data.size, _SEGMENT_SIZE);
		pgalloc_low = max(pgalloc_low, initrd_end);
	}

	if (pgalloc_low + shadow_alloc_size > memsize)
		kasan_early_panic("out of memory during initialisation\n");

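	/*
	 * With EDAT the shadow is carved out of the top of memory in 1MB
	 * segments, and page tables are handed out below segment_low.
	 * Without EDAT everything comes out of the page allocator, which
	 * grows down from the end of memory.
	 */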
	if (has_edat) {
		segment_pos = round_down(memsize, _SEGMENT_SIZE);
		segment_low = segment_pos - shadow_alloc_size;
		pgalloc_pos = segment_low;
	} else {
		pgalloc_pos = memsize;
	}
	init_mm.pgd = early_pg_dir;
	/*
	 * Current memory layout:
	 * +- 0 -------------+     +- shadow start -+
	 * | 1:1 ram mapping |    /| 1/8 ram        |
	 * |                 |   / |                |
	 * +- end of ram ----+  /  +----------------+
	 * | ... gap ...     | /   |                |
	 * |                 |/    |     kasan      |
	 * +- shadow start --+     |     zero       |
	 * | 1/8 addr space  |     |     page       |
	 * +- shadow end ----+     |    mapping     |
	 * | ... gap ...     |\    |  (untracked)   |
	 * +- vmalloc area --+ \   |                |
	 * | vmalloc_size    |  \  |                |
	 * +- modules vaddr -+   \ +----------------+
	 * | 2Gb             |    \|    unmapped    | allocated per module
	 * +-----------------+     +- shadow end ---+
	 *
	 * Current memory layout (KASAN_VMALLOC):
	 * +- 0 -------------+     +- shadow start -+
	 * | 1:1 ram mapping |    /| 1/8 ram        |
	 * |                 |   / |                |
	 * +- end of ram ----+  /  +----------------+
	 * | ... gap ...     | /   |     kasan      |
	 * |                 |/    |     zero       |
	 * +- shadow start --+     |     page       |
	 * | 1/8 addr space  |     |    mapping     |
	 * +- shadow end ----+     |  (untracked)   |
	 * | ... gap ...     |\    |                |
	 * +- vmalloc area --+ \   +- vmalloc area -+
	 * | vmalloc_size    |  \  |shallow populate|
	 * +- modules vaddr -+   \ +- modules area -+
	 * | 2Gb             |    \|shallow populate|
	 * +-----------------+     +- shadow end ---+
	 */
	/* populate kasan shadow (for identity mapping and zero page mapping) */
	kasan_early_pgtable_populate(__sha(0), __sha(memsize), POPULATE_MAP);
	if (IS_ENABLED(CONFIG_KASAN_VMALLOC)) {
		/* shallowly populate kasan shadow for vmalloc and modules */
		kasan_early_pgtable_populate(__sha(VMALLOC_START), __sha(MODULES_END),
					     POPULATE_SHALLOW);
	}
	/* populate kasan shadow for untracked memory */
	kasan_early_pgtable_populate(__sha(ident_map_size),
				     IS_ENABLED(CONFIG_KASAN_VMALLOC) ?
				     __sha(VMALLOC_START) :
				     __sha(MODULES_VADDR),
				     POPULATE_ZERO_SHADOW);
	kasan_early_pgtable_populate(__sha(MODULES_END), __sha(_REGION1_SIZE),
				     POPULATE_ZERO_SHADOW);
	/* memory allocated for identity mapping structs will be freed later */
	pgalloc_freeable = pgalloc_pos;
	/* populate identity mapping */
	kasan_early_pgtable_populate(0, memsize, POPULATE_ONE2ONE);
	kasan_set_pgd(early_pg_dir, _ASCE_TYPE_REGION2);
	kasan_enable_dat();
	/* enable kasan */
	init_task.kasan_depth = 0;
	memblock_reserve(pgalloc_pos, memsize - pgalloc_pos);
	sclp_early_printk("KernelAddressSanitizer initialized\n");
}

void __init kasan_copy_shadow_mapping(void)
{
	/*
	 * At this point we are still running on the early page tables set up
	 * in early_pg_dir, while swapper_pg_dir has just been initialized
	 * with the identity mapping. Carry the shadow memory region over
	 * from early_pg_dir to swapper_pg_dir.
	 */

	pgd_t *pg_dir_src;
	pgd_t *pg_dir_dst;
	p4d_t *p4_dir_src;
	p4d_t *p4_dir_dst;

	pg_dir_src = pgd_offset_raw(early_pg_dir, KASAN_SHADOW_START);
	pg_dir_dst = pgd_offset_raw(init_mm.pgd, KASAN_SHADOW_START);
	p4_dir_src = p4d_offset(pg_dir_src, KASAN_SHADOW_START);
	p4_dir_dst = p4d_offset(pg_dir_dst, KASAN_SHADOW_START);
	memcpy(p4_dir_dst, p4_dir_src,
	       (KASAN_SHADOW_SIZE >> P4D_SHIFT) * sizeof(p4d_t));
}

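/*
 * Once the final identity mapping in swapper_pg_dir has taken over, the page
 * tables that backed the early identity mapping are no longer needed and
 * their memory is handed back to memblock.
 */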
void __init kasan_free_early_identity(void)
{
	memblock_phys_free(pgalloc_pos, pgalloc_freeable - pgalloc_pos);
}