/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_KASAN_H
#define __ASM_KASAN_H

#ifndef __ASSEMBLY__

#include <linux/linkage.h>
#include <asm/memory.h>
#include <asm/pgtable-types.h>

#define arch_kasan_set_tag(addr, tag)	__tag_set(addr, tag)
#define arch_kasan_reset_tag(addr)	__tag_reset(addr)
#define arch_kasan_get_tag(addr)	__tag_get(addr)
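/*
 * Illustrative sketch only, assuming the usual arm64 software-tag layout
 * where the tag occupies the pointer's top byte (bits 63:56); the 0xf8
 * value below is a made-up example tag, not anything this header defines:
 *
 *	void *q = arch_kasan_set_tag(p, 0xf8);	// top byte of q is now 0xf8
 *	u8 tag  = arch_kasan_get_tag(q);	// yields 0xf8
 *	void *r = arch_kasan_reset_tag(q);	// tag bits cleared again
 */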

#ifdef CONFIG_KASAN

/*
 * KASAN_SHADOW_START: beginning of the kernel virtual addresses.
 * KASAN_SHADOW_END: KASAN_SHADOW_START + 1/N of kernel virtual addresses,
 * where N = (1 << KASAN_SHADOW_SCALE_SHIFT).
 *
 * KASAN_SHADOW_OFFSET:
 * This value is used to map an address to the corresponding shadow
 * address by the following formula:
 *	shadow_addr = (address >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
 *
 * (1 << (64 - KASAN_SHADOW_SCALE_SHIFT)) shadow addresses that lie in the
 * range [KASAN_SHADOW_OFFSET, KASAN_SHADOW_END) cover all 64 bits of virtual
 * addresses. So KASAN_SHADOW_OFFSET should satisfy the following equation:
 *	KASAN_SHADOW_OFFSET = KASAN_SHADOW_END -
 *				(1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT))
 */
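/*
 * Worked example (illustrative only; the concrete numbers depend on the
 * configuration): with generic KASAN, KASAN_SHADOW_SCALE_SHIFT is 3, i.e.
 * one shadow byte tracks an 8-byte granule, so an access to the example
 * address 0xffff000012345678 is checked via the shadow byte at
 *
 *	(0xffff000012345678 >> 3) + KASAN_SHADOW_OFFSET
 *	= 0x1fffe00002468acf + KASAN_SHADOW_OFFSET
 *
 * With software tags the shift is 4 (16-byte granules) and the same
 * formula applies.
 */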
#define _KASAN_SHADOW_START(va)	(KASAN_SHADOW_END - (1UL << ((va) - KASAN_SHADOW_SCALE_SHIFT)))
#define KASAN_SHADOW_START	_KASAN_SHADOW_START(vabits_actual)
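/*
 * Sketch of the arithmetic above, assuming a 48-bit VA configuration and
 * generic KASAN (KASAN_SHADOW_SCALE_SHIFT == 3):
 *
 *	_KASAN_SHADOW_START(48)
 *	= KASAN_SHADOW_END - (1UL << (48 - 3))
 *	= KASAN_SHADOW_END - 2^45
 *
 * i.e. the shadow occupies 1/8th of the 2^48-byte VA space, sitting
 * directly below KASAN_SHADOW_END. Using the runtime vabits_actual value
 * lets the same kernel image cover both 48-bit and 52-bit VA setups.
 */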

void kasan_init(void);
void kasan_copy_shadow(pgd_t *pgdir);
asmlinkage void kasan_early_init(void);

#else
static inline void kasan_init(void) { }
static inline void kasan_copy_shadow(pgd_t *pgdir) { }
#endif

#endif
#endif