// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * This file contains the routines setting up the linux page tables.
 *  -- paulus
 *
 * Derived from arch/ppc/mm/init.c:
 *   Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 *  Modifications by Paul Mackerras (PowerMac) (paulus@cs.anu.edu.au)
 *  and Cort Dougan (PReP) (cort@cs.nmt.edu)
 *    Copyright (C) 1996 Paul Mackerras
 *
 *  Derived from "arch/i386/mm/init.c"
 *    Copyright (C) 1991, 1992, 1993, 1994  Linus Torvalds
 */
| 16 | |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 17 | #include <linux/kernel.h> |
| 18 | #include <linux/module.h> |
| 19 | #include <linux/types.h> |
| 20 | #include <linux/mm.h> |
| 21 | #include <linux/vmalloc.h> |
| 22 | #include <linux/init.h> |
| 23 | #include <linux/highmem.h> |
Yinghai Lu | 95f72d1 | 2010-07-12 14:36:09 +1000 | [diff] [blame] | 24 | #include <linux/memblock.h> |
Tejun Heo | 5a0e3ad | 2010-03-24 17:04:11 +0900 | [diff] [blame] | 25 | #include <linux/slab.h> |
Christophe Leroy | c988cfd | 2021-06-09 11:34:31 +1000 | [diff] [blame] | 26 | #include <linux/set_memory.h> |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 27 | |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 28 | #include <asm/pgalloc.h> |
Kumar Gala | 2c419bd | 2008-04-23 23:05:20 +1000 | [diff] [blame] | 29 | #include <asm/fixmap.h> |
David Howells | ae3a197 | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 30 | #include <asm/setup.h> |
Christophe Leroy | 95902e6 | 2017-08-02 15:51:05 +0200 | [diff] [blame] | 31 | #include <asm/sections.h> |
Christophe Leroy | 925ac14 | 2020-05-19 05:48:58 +0000 | [diff] [blame] | 32 | #include <asm/early_ioremap.h> |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 33 | |
Christophe Leroy | 9d9f2cc | 2019-03-29 09:59:59 +0000 | [diff] [blame] | 34 | #include <mm/mmu_decl.h> |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 35 | |
/*
 * Statically allocated PTE page(s) backing the fixmap region, usable
 * before any page-table allocator is up. __page_aligned_data provides
 * the page alignment required for a PTE page.
 */
static u8 early_fixmap_pagetable[FIXMAP_PTE_SIZE] __page_aligned_data;

/*
 * Wire up page tables for the fixmap area very early in boot so that
 * early_ioremap() works before memblock/slab are available.
 * notrace: may run before the tracing infrastructure is ready.
 */
notrace void __init early_ioremap_init(void)
{
	unsigned long addr = ALIGN_DOWN(FIXADDR_START, PGDIR_SIZE);
	pte_t *ptep = (pte_t *)early_fixmap_pagetable;
	pmd_t *pmdp = pmd_off_k(addr);

	/*
	 * Populate one PMD entry per PGDIR_SIZE chunk of the fixmap span.
	 * The (s32) cast makes the termination test a signed comparison,
	 * so it stays correct when FIXADDR_TOP is near the top of the
	 * address space and the subtraction would otherwise wrap.
	 * NOTE(review): assumes the fixmap span fits in 2GB — confirm.
	 */
	for (; (s32)(FIXADDR_TOP - addr) > 0;
	     addr += PGDIR_SIZE, ptep += PTRS_PER_PTE, pmdp++)
		pmd_populate_kernel(&init_mm, pmdp, ptep);

	early_ioremap_setup();
}
| 50 | |
Christophe Leroy | 4a6d8cf | 2019-04-26 15:58:06 +0000 | [diff] [blame] | 51 | static void __init *early_alloc_pgtable(unsigned long size) |
| 52 | { |
| 53 | void *ptr = memblock_alloc(size, size); |
| 54 | |
| 55 | if (!ptr) |
| 56 | panic("%s: Failed to allocate %lu bytes align=0x%lx\n", |
| 57 | __func__, size, size); |
| 58 | |
| 59 | return ptr; |
| 60 | } |
| 61 | |
Christophe Leroy | 34536d7 | 2020-05-19 05:49:22 +0000 | [diff] [blame] | 62 | pte_t __init *early_pte_alloc_kernel(pmd_t *pmdp, unsigned long va) |
Christophe Leroy | 4a6d8cf | 2019-04-26 15:58:06 +0000 | [diff] [blame] | 63 | { |
| 64 | if (pmd_none(*pmdp)) { |
| 65 | pte_t *ptep = early_alloc_pgtable(PTE_FRAG_SIZE); |
| 66 | |
| 67 | pmd_populate_kernel(&init_mm, pmdp, ptep); |
| 68 | } |
| 69 | return pte_offset_kernel(pmdp, va); |
| 70 | } |
| 71 | |
| 72 | |
| 73 | int __ref map_kernel_page(unsigned long va, phys_addr_t pa, pgprot_t prot) |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 74 | { |
| 75 | pmd_t *pd; |
| 76 | pte_t *pg; |
| 77 | int err = -ENOMEM; |
| 78 | |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 79 | /* Use upper 10 bits of VA to index the first level map */ |
Mike Rapoport | e05c7b1 | 2020-06-08 21:33:05 -0700 | [diff] [blame] | 80 | pd = pmd_off_k(va); |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 81 | /* Use middle 10 bits of VA to index the second-level map */ |
Christophe Leroy | 4a6d8cf | 2019-04-26 15:58:06 +0000 | [diff] [blame] | 82 | if (likely(slab_is_available())) |
| 83 | pg = pte_alloc_kernel(pd, va); |
| 84 | else |
| 85 | pg = early_pte_alloc_kernel(pd, va); |
Kaixu Xia | b84bf09 | 2020-11-10 10:56:01 +0800 | [diff] [blame] | 86 | if (pg) { |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 87 | err = 0; |
Benjamin Herrenschmidt | 3be4e69 | 2007-04-12 15:30:21 +1000 | [diff] [blame] | 88 | /* The PTE should never be already set nor present in the |
| 89 | * hash table |
| 90 | */ |
Christophe Leroy | 26973fa | 2018-10-09 13:51:56 +0000 | [diff] [blame] | 91 | BUG_ON((pte_present(*pg) | pte_hashpte(*pg)) && pgprot_val(prot)); |
Christophe Leroy | c766ee7 | 2018-10-09 13:51:45 +0000 | [diff] [blame] | 92 | set_pte_at(&init_mm, va, pg, pfn_pte(pa >> PAGE_SHIFT, prot)); |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 93 | } |
Scott Wood | 47ce8af | 2013-10-11 19:22:37 -0500 | [diff] [blame] | 94 | smp_wmb(); |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 95 | return err; |
| 96 | } |
| 97 | |
| 98 | /* |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 99 | * Map in a chunk of physical memory starting at start. |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 100 | */ |
Christophe Leroy | 86b1952 | 2017-08-02 15:51:07 +0200 | [diff] [blame] | 101 | static void __init __mapin_ram_chunk(unsigned long offset, unsigned long top) |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 102 | { |
Christophe Leroy | c766ee7 | 2018-10-09 13:51:45 +0000 | [diff] [blame] | 103 | unsigned long v, s; |
Kumar Gala | 99c62dd7 | 2008-04-16 05:52:21 +1000 | [diff] [blame] | 104 | phys_addr_t p; |
Kefeng Wang | 843a1ff | 2021-11-08 18:34:13 -0800 | [diff] [blame] | 105 | bool ktext; |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 106 | |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 107 | s = offset; |
Dale Farnsworth | ccdcef7 | 2008-12-17 10:09:13 +0000 | [diff] [blame] | 108 | v = PAGE_OFFSET + s; |
Kumar Gala | 99c62dd7 | 2008-04-16 05:52:21 +1000 | [diff] [blame] | 109 | p = memstart_addr + s; |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 110 | for (; s < top; s += PAGE_SIZE) { |
Kefeng Wang | 843a1ff | 2021-11-08 18:34:13 -0800 | [diff] [blame] | 111 | ktext = core_kernel_text(v); |
Christophe Leroy | c766ee7 | 2018-10-09 13:51:45 +0000 | [diff] [blame] | 112 | map_kernel_page(v, p, ktext ? PAGE_KERNEL_TEXT : PAGE_KERNEL); |
Paul Mackerras | 14cf11a | 2005-09-26 16:04:21 +1000 | [diff] [blame] | 113 | v += PAGE_SIZE; |
| 114 | p += PAGE_SIZE; |
| 115 | } |
| 116 | } |
| 117 | |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 118 | void __init mapin_ram(void) |
| 119 | { |
Mike Rapoport | b10d6bc | 2020-10-13 16:58:08 -0700 | [diff] [blame] | 120 | phys_addr_t base, end; |
| 121 | u64 i; |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 122 | |
Mike Rapoport | b10d6bc | 2020-10-13 16:58:08 -0700 | [diff] [blame] | 123 | for_each_mem_range(i, &base, &end) { |
| 124 | phys_addr_t top = min(end, total_lowmem); |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 125 | |
Christophe Leroy | 9e849f23 | 2019-02-21 19:08:40 +0000 | [diff] [blame] | 126 | if (base >= top) |
| 127 | continue; |
| 128 | base = mmu_mapin_ram(base, top); |
Christophe Leroy | a2227a2 | 2019-08-23 09:56:21 +0000 | [diff] [blame] | 129 | __mapin_ram_chunk(base, top); |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 130 | } |
Albert Herranz | de32400 | 2009-12-12 06:31:53 +0000 | [diff] [blame] | 131 | } |
| 132 | |
Christophe Leroy | 3184cc4 | 2017-08-02 15:51:03 +0200 | [diff] [blame] | 133 | void mark_initmem_nx(void) |
| 134 | { |
Christophe Leroy | 3184cc4 | 2017-08-02 15:51:03 +0200 | [diff] [blame] | 135 | unsigned long numpages = PFN_UP((unsigned long)_einittext) - |
| 136 | PFN_DOWN((unsigned long)_sinittext); |
Benjamin Herrenschmidt | 88df6e9 | 2007-04-12 15:30:22 +1000 | [diff] [blame] | 137 | |
Christophe Leroy | 4e3319c | 2020-05-19 05:48:59 +0000 | [diff] [blame] | 138 | if (v_block_mapped((unsigned long)_sinittext)) |
Christophe Leroy | 63b2bc6 | 2019-02-21 19:08:49 +0000 | [diff] [blame] | 139 | mmu_mark_initmem_nx(); |
| 140 | else |
Christophe Leroy | c988cfd | 2021-06-09 11:34:31 +1000 | [diff] [blame] | 141 | set_memory_attr((unsigned long)_sinittext, numpages, PAGE_KERNEL); |
Christophe Leroy | 3184cc4 | 2017-08-02 15:51:03 +0200 | [diff] [blame] | 142 | } |
| 143 | |
#ifdef CONFIG_STRICT_KERNEL_RWX
/*
 * Write-protect kernel text and rodata once boot-time writes to them
 * are finished, then run the W^X sanity check.
 */
void mark_rodata_ro(void)
{
	unsigned long npages;

	/* Block-mapped text is handled entirely by the MMU-specific code */
	if (v_block_mapped((unsigned long)_stext + 1)) {
		mmu_mark_rodata_ro();
		ptdump_check_wx();
		return;
	}

	/* Kernel text: read-only and executable */
	npages = PFN_UP((unsigned long)_etext) -
		 PFN_DOWN((unsigned long)_stext);
	set_memory_attr((unsigned long)_stext, npages, PAGE_KERNEL_ROX);

	/*
	 * mark .rodata as read only. Use __init_begin rather than
	 * __end_rodata to cover NOTES and EXCEPTION_TABLE.
	 */
	npages = PFN_UP((unsigned long)__init_begin) -
		 PFN_DOWN((unsigned long)__start_rodata);
	set_memory_attr((unsigned long)__start_rodata, npages, PAGE_KERNEL_RO);

	// mark_initmem_nx() should have already run by now
	ptdump_check_wx();
}
#endif
| 172 | |
#if defined(CONFIG_ARCH_SUPPORTS_DEBUG_PAGEALLOC) && defined(CONFIG_DEBUG_PAGEALLOC)
/*
 * DEBUG_PAGEALLOC hook: map (enable) or unmap (disable) a run of
 * lowmem pages so stray accesses to freed pages fault. Highmem pages
 * have no permanent kernel mapping and are skipped.
 */
void __kernel_map_pages(struct page *page, int numpages, int enable)
{
	unsigned long addr;

	if (PageHighMem(page))
		return;

	addr = (unsigned long)page_address(page);
	set_memory_attr(addr, numpages, enable ? PAGE_KERNEL : __pgprot(0));
}
#endif /* CONFIG_ARCH_SUPPORTS_DEBUG_PAGEALLOC && CONFIG_DEBUG_PAGEALLOC */