/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __MM_KASAN_KASAN_H
#define __MM_KASAN_KASAN_H

#include <linux/kasan.h>
#include <linux/kasan-tags.h>
#include <linux/kfence.h>
#include <linux/stackdepot.h>

#ifdef CONFIG_KASAN_HW_TAGS

#include <linux/static_key.h>
#include "../slab.h"

DECLARE_STATIC_KEY_FALSE(kasan_flag_stacktrace);

enum kasan_mode {
	KASAN_MODE_SYNC,
	KASAN_MODE_ASYNC,
	KASAN_MODE_ASYMM,
};

extern enum kasan_mode kasan_mode __ro_after_init;

static inline bool kasan_stack_collection_enabled(void)
{
	return static_branch_unlikely(&kasan_flag_stacktrace);
}

static inline bool kasan_async_fault_possible(void)
{
	return kasan_mode == KASAN_MODE_ASYNC || kasan_mode == KASAN_MODE_ASYMM;
}

static inline bool kasan_sync_fault_possible(void)
{
	return kasan_mode == KASAN_MODE_SYNC || kasan_mode == KASAN_MODE_ASYMM;
}
#else

static inline bool kasan_stack_collection_enabled(void)
{
	return true;
}

static inline bool kasan_async_fault_possible(void)
{
	return false;
}

static inline bool kasan_sync_fault_possible(void)
{
	return true;
}

#endif

#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)
#define KASAN_GRANULE_SIZE	(1UL << KASAN_SHADOW_SCALE_SHIFT)
#else
#include <asm/mte-kasan.h>
#define KASAN_GRANULE_SIZE	MTE_GRANULE_SIZE
#endif

#define KASAN_GRANULE_MASK	(KASAN_GRANULE_SIZE - 1)

#define KASAN_MEMORY_PER_SHADOW_PAGE	(KASAN_GRANULE_SIZE << PAGE_SHIFT)
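
/*
 * For illustration (not a new helper): an address is granule-aligned when
 * (addr & KASAN_GRANULE_MASK) == 0, and sizes are rounded to whole granules
 * with round_up(size, KASAN_GRANULE_SIZE). Since each shadow/metadata byte
 * covers one granule, a page worth of shadow therefore describes
 * KASAN_MEMORY_PER_SHADOW_PAGE bytes of memory.
 */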

#ifdef CONFIG_KASAN_GENERIC
#define KASAN_FREE_PAGE		0xFF  /* page was freed */
#define KASAN_PAGE_REDZONE	0xFE  /* redzone for kmalloc_large allocations */
#define KASAN_KMALLOC_REDZONE	0xFC  /* redzone inside slub object */
#define KASAN_KMALLOC_FREE	0xFB  /* object was freed (kmem_cache_free/kfree) */
#define KASAN_KMALLOC_FREETRACK	0xFA  /* object was freed and has free track set */
#else
#define KASAN_FREE_PAGE		KASAN_TAG_INVALID
#define KASAN_PAGE_REDZONE	KASAN_TAG_INVALID
#define KASAN_KMALLOC_REDZONE	KASAN_TAG_INVALID
#define KASAN_KMALLOC_FREE	KASAN_TAG_INVALID
#define KASAN_KMALLOC_FREETRACK	KASAN_TAG_INVALID
#endif

#define KASAN_GLOBAL_REDZONE	0xF9  /* redzone for global variable */
#define KASAN_VMALLOC_INVALID	0xF8  /* unallocated space in vmapped page */

/*
 * Stack redzone shadow values.
 * (These are part of the compiler's ABI; don't change them.)
 */
#define KASAN_STACK_LEFT	0xF1
#define KASAN_STACK_MID		0xF2
#define KASAN_STACK_RIGHT	0xF3
#define KASAN_STACK_PARTIAL	0xF4

/*
 * alloca redzone shadow values
 */
#define KASAN_ALLOCA_LEFT	0xCA
#define KASAN_ALLOCA_RIGHT	0xCB

#define KASAN_ALLOCA_REDZONE_SIZE	32

/*
 * Stack frame marker (compiler ABI).
 */
#define KASAN_CURRENT_STACK_FRAME_MAGIC 0x41B58AB3

/* Don't break randconfig/all*config builds */
#ifndef KASAN_ABI_VERSION
#define KASAN_ABI_VERSION 1
#endif

/* Metadata layout customization. */
#define META_BYTES_PER_BLOCK 1
#define META_BLOCKS_PER_ROW 16
#define META_BYTES_PER_ROW (META_BLOCKS_PER_ROW * META_BYTES_PER_BLOCK)
#define META_MEM_BYTES_PER_ROW (META_BYTES_PER_ROW * KASAN_GRANULE_SIZE)
#define META_ROWS_AROUND_ADDR 2

struct kasan_access_info {
	const void *access_addr;
	const void *first_bad_addr;
	size_t access_size;
	bool is_write;
	unsigned long ip;
};

/* The layout of this struct is dictated by the compiler. */
struct kasan_source_location {
	const char *filename;
	int line_no;
	int column_no;
};

/* The layout of this struct is dictated by the compiler. */
struct kasan_global {
	const void *beg;		/* Address of the beginning of the global variable. */
	size_t size;			/* Size of the global variable. */
	size_t size_with_redzone;	/* Size of the variable + size of the redzone, 32-byte aligned. */
	const void *name;		/* Name of the global variable. */
	const void *module_name;	/* Name of the module where the global variable is declared. */
	unsigned long has_dynamic_init;	/* This is needed for C++. */
#if KASAN_ABI_VERSION >= 4
	struct kasan_source_location *location;
#endif
#if KASAN_ABI_VERSION >= 5
	char *odr_indicator;
#endif
};

/* Structures to keep alloc and free tracks. */

#define KASAN_STACK_DEPTH 64

struct kasan_track {
	u32 pid;
	depot_stack_handle_t stack;
};

#if defined(CONFIG_KASAN_TAGS_IDENTIFY) && defined(CONFIG_KASAN_SW_TAGS)
#define KASAN_NR_FREE_STACKS 5
#else
#define KASAN_NR_FREE_STACKS 1
#endif

struct kasan_alloc_meta {
	struct kasan_track alloc_track;
#ifdef CONFIG_KASAN_GENERIC
	/*
	 * The auxiliary stack is stored in struct kasan_alloc_meta;
	 * the free stack is stored in struct kasan_free_meta.
	 */
	depot_stack_handle_t aux_stack[2];
#else
	struct kasan_track free_track[KASAN_NR_FREE_STACKS];
#endif
#ifdef CONFIG_KASAN_TAGS_IDENTIFY
	u8 free_pointer_tag[KASAN_NR_FREE_STACKS];
	u8 free_track_idx;
#endif
};

struct qlist_node {
	struct qlist_node *next;
};

/*
 * Generic mode either stores free meta in the object itself or in the redzone
 * after the object. In the former case free meta offset is 0, in the latter
 * case it has some sane value smaller than INT_MAX. Use INT_MAX as free meta
 * offset when free meta isn't present.
 */
#define KASAN_NO_FREE_META INT_MAX
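
/*
 * A sketch of how the offset is consumed (assuming the kasan_cache fields
 * declared in <linux/kasan.h>):
 *
 *	if (cache->kasan_info.free_meta_offset == KASAN_NO_FREE_META)
 *		return NULL;
 *	return (void *)object + cache->kasan_info.free_meta_offset;
 */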

struct kasan_free_meta {
#ifdef CONFIG_KASAN_GENERIC
	/* This field is used while the object is in the quarantine.
	 * Otherwise it might be used for the allocator freelist.
	 */
	struct qlist_node quarantine_link;
	struct kasan_track free_track;
#endif
};

struct kasan_alloc_meta *kasan_get_alloc_meta(struct kmem_cache *cache,
						const void *object);
#ifdef CONFIG_KASAN_GENERIC
struct kasan_free_meta *kasan_get_free_meta(struct kmem_cache *cache,
						const void *object);
#endif

#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)

static inline const void *kasan_shadow_to_mem(const void *shadow_addr)
{
	return (void *)(((unsigned long)shadow_addr - KASAN_SHADOW_OFFSET)
		<< KASAN_SHADOW_SCALE_SHIFT);
}
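
/*
 * This is the inverse of kasan_mem_to_shadow() from <linux/kasan.h>, which
 * (roughly) computes:
 *
 *	shadow = (void *)((unsigned long)addr >> KASAN_SHADOW_SCALE_SHIFT)
 *			+ KASAN_SHADOW_OFFSET;
 */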

static inline bool addr_has_metadata(const void *addr)
{
	return (addr >= kasan_shadow_to_mem((void *)KASAN_SHADOW_START));
}

/**
 * kasan_check_range - Check a memory region and report any invalid access.
 * @addr: the accessed address
 * @size: the accessed size
 * @write: true if access is a write access
 * @ret_ip: return address
 * @return: true if access was valid, false if invalid
 */
bool kasan_check_range(unsigned long addr, size_t size, bool write,
				unsigned long ret_ip);
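
/*
 * Typical use (a sketch modelled on the instrumented string helpers):
 *
 *	if (!kasan_check_range((unsigned long)src, size, false, _RET_IP_) ||
 *	    !kasan_check_range((unsigned long)dest, size, true, _RET_IP_))
 *		return NULL;
 */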

#else /* CONFIG_KASAN_GENERIC || CONFIG_KASAN_SW_TAGS */

static inline bool addr_has_metadata(const void *addr)
{
	return (is_vmalloc_addr(addr) || virt_addr_valid(addr));
}

#endif /* CONFIG_KASAN_GENERIC || CONFIG_KASAN_SW_TAGS */

#if defined(CONFIG_KASAN_SW_TAGS) || defined(CONFIG_KASAN_HW_TAGS)
void kasan_print_tags(u8 addr_tag, const void *addr);
#else
static inline void kasan_print_tags(u8 addr_tag, const void *addr) { }
#endif

void *kasan_find_first_bad_addr(void *addr, size_t size);
const char *kasan_get_bug_type(struct kasan_access_info *info);
void kasan_metadata_fetch_row(char *buffer, void *row);

#if defined(CONFIG_KASAN_GENERIC) && defined(CONFIG_KASAN_STACK)
void kasan_print_address_stack_frame(const void *addr);
#else
static inline void kasan_print_address_stack_frame(const void *addr) { }
#endif

bool kasan_report(unsigned long addr, size_t size,
		bool is_write, unsigned long ip);
void kasan_report_invalid_free(void *object, unsigned long ip);

struct page *kasan_addr_to_page(const void *addr);
struct slab *kasan_addr_to_slab(const void *addr);

depot_stack_handle_t kasan_save_stack(gfp_t flags, bool can_alloc);
void kasan_set_track(struct kasan_track *track, gfp_t flags);
void kasan_set_free_info(struct kmem_cache *cache, void *object, u8 tag);
struct kasan_track *kasan_get_free_track(struct kmem_cache *cache,
				void *object, u8 tag);
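
/*
 * Example (sketch): recording the allocation stack in the object's alloc
 * metadata, assuming alloc_meta was obtained via kasan_get_alloc_meta():
 *
 *	if (alloc_meta)
 *		kasan_set_track(&alloc_meta->alloc_track, flags);
 */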

#if defined(CONFIG_KASAN_GENERIC) && \
	(defined(CONFIG_SLAB) || defined(CONFIG_SLUB))
bool kasan_quarantine_put(struct kmem_cache *cache, void *object);
void kasan_quarantine_reduce(void);
void kasan_quarantine_remove_cache(struct kmem_cache *cache);
#else
static inline bool kasan_quarantine_put(struct kmem_cache *cache, void *object) { return false; }
static inline void kasan_quarantine_reduce(void) { }
static inline void kasan_quarantine_remove_cache(struct kmem_cache *cache) { }
#endif

#ifndef arch_kasan_set_tag
static inline const void *arch_kasan_set_tag(const void *addr, u8 tag)
{
	return addr;
}
#endif
#ifndef arch_kasan_get_tag
#define arch_kasan_get_tag(addr)	0
#endif

#define set_tag(addr, tag)	((void *)arch_kasan_set_tag((addr), (tag)))
#define get_tag(addr)		arch_kasan_get_tag(addr)
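
/*
 * Example (sketch): tagging a pointer with a freshly generated tag and
 * reading the tag back:
 *
 *	ptr = set_tag(ptr, kasan_random_tag());
 *	tag = get_tag(ptr);
 */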

#ifdef CONFIG_KASAN_HW_TAGS

#ifndef arch_enable_tagging_sync
#define arch_enable_tagging_sync()
#endif
#ifndef arch_enable_tagging_async
#define arch_enable_tagging_async()
#endif
#ifndef arch_enable_tagging_asymm
#define arch_enable_tagging_asymm()
#endif
#ifndef arch_force_async_tag_fault
#define arch_force_async_tag_fault()
#endif
#ifndef arch_get_random_tag
#define arch_get_random_tag()	(0xFF)
#endif
#ifndef arch_get_mem_tag
#define arch_get_mem_tag(addr)	(0xFF)
#endif
#ifndef arch_set_mem_tag_range
#define arch_set_mem_tag_range(addr, size, tag, init) ((void *)(addr))
#endif

#define hw_enable_tagging_sync()		arch_enable_tagging_sync()
#define hw_enable_tagging_async()		arch_enable_tagging_async()
#define hw_enable_tagging_asymm()		arch_enable_tagging_asymm()
#define hw_force_async_tag_fault()		arch_force_async_tag_fault()
#define hw_get_random_tag()			arch_get_random_tag()
#define hw_get_mem_tag(addr)			arch_get_mem_tag(addr)
#define hw_set_mem_tag_range(addr, size, tag, init) \
			arch_set_mem_tag_range((addr), (size), (tag), (init))

#else /* CONFIG_KASAN_HW_TAGS */

#define hw_enable_tagging_sync()
#define hw_enable_tagging_async()
#define hw_enable_tagging_asymm()

#endif /* CONFIG_KASAN_HW_TAGS */

#if defined(CONFIG_KASAN_HW_TAGS) && IS_ENABLED(CONFIG_KASAN_KUNIT_TEST)

void kasan_enable_tagging_sync(void);
void kasan_force_async_fault(void);

#else /* CONFIG_KASAN_HW_TAGS || CONFIG_KASAN_KUNIT_TEST */

static inline void kasan_enable_tagging_sync(void) { }
static inline void kasan_force_async_fault(void) { }

#endif /* CONFIG_KASAN_HW_TAGS || CONFIG_KASAN_KUNIT_TEST */

#ifdef CONFIG_KASAN_SW_TAGS
u8 kasan_random_tag(void);
#elif defined(CONFIG_KASAN_HW_TAGS)
static inline u8 kasan_random_tag(void) { return hw_get_random_tag(); }
#else
static inline u8 kasan_random_tag(void) { return 0; }
#endif

#ifdef CONFIG_KASAN_HW_TAGS

static inline void kasan_poison(const void *addr, size_t size, u8 value, bool init)
{
	addr = kasan_reset_tag(addr);

	/* Skip KFENCE memory if called explicitly outside of sl*b. */
	if (is_kfence_address(addr))
		return;

	if (WARN_ON((unsigned long)addr & KASAN_GRANULE_MASK))
		return;
	if (WARN_ON(size & KASAN_GRANULE_MASK))
		return;

	hw_set_mem_tag_range((void *)addr, size, value, init);
}

static inline void kasan_unpoison(const void *addr, size_t size, bool init)
{
	u8 tag = get_tag(addr);

	addr = kasan_reset_tag(addr);

	/* Skip KFENCE memory if called explicitly outside of sl*b. */
	if (is_kfence_address(addr))
		return;

	if (WARN_ON((unsigned long)addr & KASAN_GRANULE_MASK))
		return;
	/*
	 * Explicitly initialize the memory with the precise object size to
	 * avoid overwriting the SLAB redzone. This disables initialization in
	 * the arch code and may thus lead to a performance penalty. The
	 * penalty is accepted since SLAB redzones aren't enabled in production
	 * builds.
	 */
	if (__slub_debug_enabled() &&
	    init && ((unsigned long)size & KASAN_GRANULE_MASK)) {
		init = false;
		memzero_explicit((void *)addr, size);
	}
	size = round_up(size, KASAN_GRANULE_SIZE);

	hw_set_mem_tag_range((void *)addr, size, tag, init);
}

static inline bool kasan_byte_accessible(const void *addr)
{
	u8 ptr_tag = get_tag(addr);
	u8 mem_tag = hw_get_mem_tag((void *)addr);

	return ptr_tag == KASAN_TAG_KERNEL || ptr_tag == mem_tag;
}

#else /* CONFIG_KASAN_HW_TAGS */

/**
 * kasan_poison - mark the memory range as inaccessible
 * @addr - range start address, must be aligned to KASAN_GRANULE_SIZE
 * @size - range size, must be aligned to KASAN_GRANULE_SIZE
 * @value - value that's written to metadata for the range
 * @init - whether to initialize the memory range (only for hardware tag-based)
 *
 * The size gets aligned to KASAN_GRANULE_SIZE before marking the range.
 */
void kasan_poison(const void *addr, size_t size, u8 value, bool init);

/**
 * kasan_unpoison - mark the memory range as accessible
 * @addr - range start address, must be aligned to KASAN_GRANULE_SIZE
 * @size - range size, can be unaligned
 * @init - whether to initialize the memory range (only for hardware tag-based)
 *
 * For the tag-based modes, the @size gets aligned to KASAN_GRANULE_SIZE before
 * marking the range.
 * For the generic mode, the last granule of the memory range gets partially
 * unpoisoned based on the @size.
 */
void kasan_unpoison(const void *addr, size_t size, bool init);
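
/*
 * Example (sketch): how the common code marks a freed slab object, with the
 * size rounded up to whole granules:
 *
 *	kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE),
 *		     KASAN_KMALLOC_FREE, init);
 */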

bool kasan_byte_accessible(const void *addr);

#endif /* CONFIG_KASAN_HW_TAGS */

#ifdef CONFIG_KASAN_GENERIC

/**
 * kasan_poison_last_granule - mark the last granule of the memory range as
 * inaccessible
 * @addr - range start address, must be aligned to KASAN_GRANULE_SIZE
 * @size - range size
 *
 * This function is only available for the generic mode, as it's the only mode
 * that has partially poisoned memory granules.
 */
void kasan_poison_last_granule(const void *address, size_t size);

#else /* CONFIG_KASAN_GENERIC */

static inline void kasan_poison_last_granule(const void *address, size_t size) { }

#endif /* CONFIG_KASAN_GENERIC */

#ifndef kasan_arch_is_ready
static inline bool kasan_arch_is_ready(void)	{ return true; }
#elif !defined(CONFIG_KASAN_GENERIC) || !defined(CONFIG_KASAN_OUTLINE)
#error kasan_arch_is_ready only works in KASAN generic outline mode!
#endif

/*
 * Exported functions for interfaces called from assembly or from generated
 * code. Declarations here to avoid warnings about missing declarations.
 */
asmlinkage void kasan_unpoison_task_stack_below(const void *watermark);
void __asan_register_globals(struct kasan_global *globals, size_t size);
void __asan_unregister_globals(struct kasan_global *globals, size_t size);
void __asan_handle_no_return(void);
void __asan_alloca_poison(unsigned long addr, size_t size);
void __asan_allocas_unpoison(const void *stack_top, const void *stack_bottom);

void __asan_load1(unsigned long addr);
void __asan_store1(unsigned long addr);
void __asan_load2(unsigned long addr);
void __asan_store2(unsigned long addr);
void __asan_load4(unsigned long addr);
void __asan_store4(unsigned long addr);
void __asan_load8(unsigned long addr);
void __asan_store8(unsigned long addr);
void __asan_load16(unsigned long addr);
void __asan_store16(unsigned long addr);
void __asan_loadN(unsigned long addr, size_t size);
void __asan_storeN(unsigned long addr, size_t size);

void __asan_load1_noabort(unsigned long addr);
void __asan_store1_noabort(unsigned long addr);
void __asan_load2_noabort(unsigned long addr);
void __asan_store2_noabort(unsigned long addr);
void __asan_load4_noabort(unsigned long addr);
void __asan_store4_noabort(unsigned long addr);
void __asan_load8_noabort(unsigned long addr);
void __asan_store8_noabort(unsigned long addr);
void __asan_load16_noabort(unsigned long addr);
void __asan_store16_noabort(unsigned long addr);
void __asan_loadN_noabort(unsigned long addr, size_t size);
void __asan_storeN_noabort(unsigned long addr, size_t size);

void __asan_report_load1_noabort(unsigned long addr);
void __asan_report_store1_noabort(unsigned long addr);
void __asan_report_load2_noabort(unsigned long addr);
void __asan_report_store2_noabort(unsigned long addr);
void __asan_report_load4_noabort(unsigned long addr);
void __asan_report_store4_noabort(unsigned long addr);
void __asan_report_load8_noabort(unsigned long addr);
void __asan_report_store8_noabort(unsigned long addr);
void __asan_report_load16_noabort(unsigned long addr);
void __asan_report_store16_noabort(unsigned long addr);
void __asan_report_load_n_noabort(unsigned long addr, size_t size);
void __asan_report_store_n_noabort(unsigned long addr, size_t size);

void __asan_set_shadow_00(const void *addr, size_t size);
void __asan_set_shadow_f1(const void *addr, size_t size);
void __asan_set_shadow_f2(const void *addr, size_t size);
void __asan_set_shadow_f3(const void *addr, size_t size);
void __asan_set_shadow_f5(const void *addr, size_t size);
void __asan_set_shadow_f8(const void *addr, size_t size);

void __hwasan_load1_noabort(unsigned long addr);
void __hwasan_store1_noabort(unsigned long addr);
void __hwasan_load2_noabort(unsigned long addr);
void __hwasan_store2_noabort(unsigned long addr);
void __hwasan_load4_noabort(unsigned long addr);
void __hwasan_store4_noabort(unsigned long addr);
void __hwasan_load8_noabort(unsigned long addr);
void __hwasan_store8_noabort(unsigned long addr);
void __hwasan_load16_noabort(unsigned long addr);
void __hwasan_store16_noabort(unsigned long addr);
void __hwasan_loadN_noabort(unsigned long addr, size_t size);
void __hwasan_storeN_noabort(unsigned long addr, size_t size);

void __hwasan_tag_memory(unsigned long addr, u8 tag, unsigned long size);

#endif