/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2009 Chen Liqin <liqin.chen@sunplusct.com>
 * Copyright (C) 2012 Regents of the University of California
 */

#ifndef _ASM_RISCV_PGALLOC_H
#define _ASM_RISCV_PGALLOC_H

#include <linux/mm.h>
#include <asm/tlb.h>

#ifdef CONFIG_MMU
#define __HAVE_ARCH_PUD_ALLOC_ONE
#define __HAVE_ARCH_PUD_FREE
#include <asm-generic/pgalloc.h>

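/*
 * The populate helpers below link a freshly allocated lower-level table
 * into its parent entry.  Per the RISC-V privileged spec, a non-leaf
 * entry carries the physical page number of the next-level table in its
 * PPN field (at _PAGE_PFN_SHIFT) plus _PAGE_TABLE: a valid entry with no
 * R/W/X permission bits set is a pointer to the next level, not a leaf.
 */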
static inline void pmd_populate_kernel(struct mm_struct *mm,
	pmd_t *pmd, pte_t *pte)
{
	unsigned long pfn = virt_to_pfn(pte);

	set_pmd(pmd, __pmd((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
}

static inline void pmd_populate(struct mm_struct *mm,
	pmd_t *pmd, pgtable_t pte)
{
	unsigned long pfn = virt_to_pfn(page_address(pte));

	set_pmd(pmd, __pmd((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
}

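/*
 * The PUD and P4D helpers are only meaningful when the PMD level exists,
 * i.e. on configurations with more than two page-table levels.
 */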
#ifndef __PAGETABLE_PMD_FOLDED
static inline void pud_populate(struct mm_struct *mm, pud_t *pud, pmd_t *pmd)
{
	unsigned long pfn = virt_to_pfn(pmd);

	set_pud(pud, __pud((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
}

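/*
 * pgtable_l4_enabled reflects whether a fourth translation level (Sv48)
 * was detected at boot; on Sv39 hardware the PUD level is folded at
 * runtime, so the p4d-level helpers below intentionally degrade to
 * no-ops.
 */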
static inline void p4d_populate(struct mm_struct *mm, p4d_t *p4d, pud_t *pud)
{
	if (pgtable_l4_enabled) {
		unsigned long pfn = virt_to_pfn(pud);

		set_p4d(p4d, __p4d((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
	}
}

static inline void p4d_populate_safe(struct mm_struct *mm, p4d_t *p4d,
				     pud_t *pud)
{
	if (pgtable_l4_enabled) {
		unsigned long pfn = virt_to_pfn(pud);

		set_p4d_safe(p4d,
			     __p4d((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
	}
}

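/*
 * Wrap the generic __pud_alloc_one()/__pud_free() (pulled in from
 * <asm-generic/pgalloc.h> via __HAVE_ARCH_PUD_ALLOC_ONE and
 * __HAVE_ARCH_PUD_FREE above) so that no page is allocated or freed for
 * the PUD level while it is folded at runtime.
 */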
#define pud_alloc_one pud_alloc_one
static inline pud_t *pud_alloc_one(struct mm_struct *mm, unsigned long addr)
{
	if (pgtable_l4_enabled)
		return __pud_alloc_one(mm, addr);

	return NULL;
}

#define pud_free pud_free
static inline void pud_free(struct mm_struct *mm, pud_t *pud)
{
	if (pgtable_l4_enabled)
		__pud_free(mm, pud);
}

#define __pud_free_tlb(tlb, pud, addr)  pud_free((tlb)->mm, pud)
#endif /* __PAGETABLE_PMD_FOLDED */

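/*
 * A new PGD starts with a zeroed user half; the kernel half is copied
 * from init_mm so that kernel mappings are visible in the new address
 * space from the start.
 */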
static inline pgd_t *pgd_alloc(struct mm_struct *mm)
{
	pgd_t *pgd;

	pgd = (pgd_t *)__get_free_page(GFP_KERNEL);
	if (likely(pgd != NULL)) {
		memset(pgd, 0, USER_PTRS_PER_PGD * sizeof(pgd_t));
		/* Copy kernel mappings */
		memcpy(pgd + USER_PTRS_PER_PGD,
			init_mm.pgd + USER_PTRS_PER_PGD,
			(PTRS_PER_PGD - USER_PTRS_PER_PGD) * sizeof(pgd_t));
	}
	return pgd;
}

#ifndef __PAGETABLE_PMD_FOLDED

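/*
 * pmd_free() itself comes from <asm-generic/pgalloc.h>; only the
 * mmu_gather hook needs an arch-specific definition here.
 */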
#define __pmd_free_tlb(tlb, pmd, addr)  pmd_free((tlb)->mm, pmd)

#endif /* __PAGETABLE_PMD_FOLDED */

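/*
 * Tear down a PTE page through the mmu_gather interface: the page-table
 * destructor releases the split ptlock and page-table accounting first,
 * then tlb_remove_page() defers the actual free until the corresponding
 * TLB entries have been flushed.
 */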
#define __pte_free_tlb(tlb, pte, buf)   \
do {                                    \
	pgtable_pte_page_dtor(pte);     \
	tlb_remove_page((tlb), pte);    \
} while (0)
#endif /* CONFIG_MMU */

#endif /* _ASM_RISCV_PGALLOC_H */