/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_KASAN_H
#define _LINUX_KASAN_H

#include <linux/static_key.h>
#include <linux/types.h>

struct kmem_cache;
struct page;
struct vm_struct;
struct task_struct;

#ifdef CONFIG_KASAN

#include <linux/linkage.h>
#include <asm/kasan.h>

/* kunit_kasan_expectation struct is used in KUnit tests for KASAN expected failures */
struct kunit_kasan_expectation {
	bool report_expected;
	bool report_found;
};
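
/*
 * Illustrative sketch, not part of this header: a KUnit test case can
 * declare an expected KASAN failure by setting ->report_expected before
 * triggering the bad access, then comparing it against ->report_found,
 * which the report machinery sets when a report fires. The exact test
 * harness wiring below is an assumption; the in-tree tests wrap this in
 * their own macro.
 *
 *	struct kunit_kasan_expectation fail = { .report_expected = true };
 *
 *	... trigger the access under test ...
 *
 *	KUNIT_EXPECT_EQ(test, fail.report_expected, fail.report_found);
 */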

#endif

#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)

#include <linux/pgtable.h>

/* Software KASAN implementations use shadow memory. */

#ifdef CONFIG_KASAN_SW_TAGS
#define KASAN_SHADOW_INIT 0xFF
#else
#define KASAN_SHADOW_INIT 0
#endif

extern unsigned char kasan_early_shadow_page[PAGE_SIZE];
extern pte_t kasan_early_shadow_pte[PTRS_PER_PTE];
extern pmd_t kasan_early_shadow_pmd[PTRS_PER_PMD];
extern pud_t kasan_early_shadow_pud[PTRS_PER_PUD];
extern p4d_t kasan_early_shadow_p4d[MAX_PTRS_PER_P4D];

int kasan_populate_early_shadow(const void *shadow_start,
				const void *shadow_end);

static inline void *kasan_mem_to_shadow(const void *addr)
{
	return (void *)((unsigned long)addr >> KASAN_SHADOW_SCALE_SHIFT)
		+ KASAN_SHADOW_OFFSET;
}
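
/*
 * Illustrative sketch, not part of this header: with generic KASAN,
 * KASAN_SHADOW_SCALE_SHIFT is 3, so one shadow byte tracks an 8-byte
 * granule of memory, and all addresses within a granule share the same
 * shadow byte:
 *
 *	void *s1 = kasan_mem_to_shadow(ptr);
 *	void *s2 = kasan_mem_to_shadow(ptr + 7);
 *	// for an 8-byte-aligned ptr, s1 == s2
 */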

int kasan_add_zero_shadow(void *start, unsigned long size);
void kasan_remove_zero_shadow(void *start, unsigned long size);

/* Enable reporting bugs after kasan_disable_current() */
extern void kasan_enable_current(void);

/* Disable reporting bugs for current task */
extern void kasan_disable_current(void);
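
/*
 * Illustrative sketch, not part of this header: these form a per-task
 * on/off switch for reporting, so a section that legitimately touches
 * poisoned memory can be bracketed like this:
 *
 *	kasan_disable_current();
 *	... access memory that would otherwise be reported ...
 *	kasan_enable_current();
 */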

#else /* CONFIG_KASAN_GENERIC || CONFIG_KASAN_SW_TAGS */

static inline int kasan_add_zero_shadow(void *start, unsigned long size)
{
	return 0;
}
static inline void kasan_remove_zero_shadow(void *start,
					unsigned long size)
{}

static inline void kasan_enable_current(void) {}
static inline void kasan_disable_current(void) {}

#endif /* CONFIG_KASAN_GENERIC || CONFIG_KASAN_SW_TAGS */

#ifdef CONFIG_KASAN

struct kasan_cache {
	int alloc_meta_offset;
	int free_meta_offset;
	bool is_kmalloc;
};

#ifdef CONFIG_KASAN_HW_TAGS

DECLARE_STATIC_KEY_FALSE(kasan_flag_enabled);

static __always_inline bool kasan_enabled(void)
{
	return static_branch_likely(&kasan_flag_enabled);
}

#else /* CONFIG_KASAN_HW_TAGS */

static inline bool kasan_enabled(void)
{
	return true;
}

#endif /* CONFIG_KASAN_HW_TAGS */
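
/*
 * Illustrative note, not part of this header: with CONFIG_KASAN_HW_TAGS
 * the kasan_enabled() check compiles down to a runtime-patched static
 * branch, so every hook below follows the same cheap wrapper pattern;
 * kasan_some_hook/__kasan_some_hook is a hypothetical name standing in
 * for the real pairs declared below:
 *
 *	static __always_inline void kasan_some_hook(...)
 *	{
 *		if (kasan_enabled())
 *			__kasan_some_hook(...);	// out-of-line slow path
 *	}
 */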

slab_flags_t __kasan_never_merge(void);
static __always_inline slab_flags_t kasan_never_merge(void)
{
	if (kasan_enabled())
		return __kasan_never_merge();
	return 0;
}

void __kasan_unpoison_range(const void *addr, size_t size);
static __always_inline void kasan_unpoison_range(const void *addr, size_t size)
{
	if (kasan_enabled())
		__kasan_unpoison_range(addr, size);
}

void __kasan_alloc_pages(struct page *page, unsigned int order);
static __always_inline void kasan_alloc_pages(struct page *page,
						unsigned int order)
{
	if (kasan_enabled())
		__kasan_alloc_pages(page, order);
}

void __kasan_free_pages(struct page *page, unsigned int order);
static __always_inline void kasan_free_pages(struct page *page,
						unsigned int order)
{
	if (kasan_enabled())
		__kasan_free_pages(page, order);
}

void __kasan_cache_create(struct kmem_cache *cache, unsigned int *size,
			  slab_flags_t *flags);
static __always_inline void kasan_cache_create(struct kmem_cache *cache,
				unsigned int *size, slab_flags_t *flags)
{
	if (kasan_enabled())
		__kasan_cache_create(cache, size, flags);
}

void __kasan_cache_create_kmalloc(struct kmem_cache *cache);
static __always_inline void kasan_cache_create_kmalloc(struct kmem_cache *cache)
{
	if (kasan_enabled())
		__kasan_cache_create_kmalloc(cache);
}

size_t __kasan_metadata_size(struct kmem_cache *cache);
static __always_inline size_t kasan_metadata_size(struct kmem_cache *cache)
{
	if (kasan_enabled())
		return __kasan_metadata_size(cache);
	return 0;
}

void __kasan_poison_slab(struct page *page);
static __always_inline void kasan_poison_slab(struct page *page)
{
	if (kasan_enabled())
		__kasan_poison_slab(page);
}

void __kasan_unpoison_object_data(struct kmem_cache *cache, void *object);
static __always_inline void kasan_unpoison_object_data(struct kmem_cache *cache,
							void *object)
{
	if (kasan_enabled())
		__kasan_unpoison_object_data(cache, object);
}

void __kasan_poison_object_data(struct kmem_cache *cache, void *object);
static __always_inline void kasan_poison_object_data(struct kmem_cache *cache,
							void *object)
{
	if (kasan_enabled())
		__kasan_poison_object_data(cache, object);
}

void * __must_check __kasan_init_slab_obj(struct kmem_cache *cache,
					  const void *object);
static __always_inline void * __must_check kasan_init_slab_obj(
				struct kmem_cache *cache, const void *object)
{
	if (kasan_enabled())
		return __kasan_init_slab_obj(cache, object);
	return (void *)object;
}

bool __kasan_slab_free(struct kmem_cache *s, void *object, unsigned long ip);
static __always_inline bool kasan_slab_free(struct kmem_cache *s, void *object)
{
	if (kasan_enabled())
		return __kasan_slab_free(s, object, _RET_IP_);
	return false;
}
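
/*
 * Illustrative sketch, not part of this header: the slab allocator's
 * free path can treat a true return as "KASAN kept the object" (e.g.
 * placed it in quarantine) and skip the actual free; the hook placement
 * in the caller shown here is an assumption:
 *
 *	if (kasan_slab_free(s, object))
 *		return;		// object quarantined, do not free yet
 *	... release object to the freelist ...
 */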

void __kasan_slab_free_mempool(void *ptr, unsigned long ip);
static __always_inline void kasan_slab_free_mempool(void *ptr)
{
	if (kasan_enabled())
		__kasan_slab_free_mempool(ptr, _RET_IP_);
}

void * __must_check __kasan_slab_alloc(struct kmem_cache *s,
				       void *object, gfp_t flags);
static __always_inline void * __must_check kasan_slab_alloc(
				struct kmem_cache *s, void *object, gfp_t flags)
{
	if (kasan_enabled())
		return __kasan_slab_alloc(s, object, flags);
	return object;
}

void * __must_check __kasan_kmalloc(struct kmem_cache *s, const void *object,
				    size_t size, gfp_t flags);
static __always_inline void * __must_check kasan_kmalloc(struct kmem_cache *s,
				const void *object, size_t size, gfp_t flags)
{
	if (kasan_enabled())
		return __kasan_kmalloc(s, object, size, flags);
	return (void *)object;
}
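
/*
 * Illustrative sketch, not part of this header: the allocation hooks are
 * __must_check because they return a possibly retagged pointer, so the
 * caller must continue with the return value rather than the pointer it
 * passed in; the calling context below is an assumption:
 *
 *	object = kasan_slab_alloc(s, object, flags);
 *	object = kasan_kmalloc(s, object, orig_size, flags);
 */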

void * __must_check __kasan_kmalloc_large(const void *ptr,
					  size_t size, gfp_t flags);
static __always_inline void * __must_check kasan_kmalloc_large(const void *ptr,
						size_t size, gfp_t flags)
{
	if (kasan_enabled())
		return __kasan_kmalloc_large(ptr, size, flags);
	return (void *)ptr;
}

void * __must_check __kasan_krealloc(const void *object,
				     size_t new_size, gfp_t flags);
static __always_inline void * __must_check kasan_krealloc(const void *object,
						size_t new_size, gfp_t flags)
{
	if (kasan_enabled())
		return __kasan_krealloc(object, new_size, flags);
	return (void *)object;
}

void __kasan_kfree_large(void *ptr, unsigned long ip);
static __always_inline void kasan_kfree_large(void *ptr)
{
	if (kasan_enabled())
		__kasan_kfree_large(ptr, _RET_IP_);
}

/*
 * Unlike kasan_check_read/write(), kasan_check_byte() is performed even for
 * the hardware tag-based mode, which doesn't rely on compiler instrumentation.
 */
bool __kasan_check_byte(const void *addr, unsigned long ip);
static __always_inline bool kasan_check_byte(const void *addr)
{
	if (kasan_enabled())
		return __kasan_check_byte(addr, _RET_IP_);
	return true;
}
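
/*
 * Illustrative sketch, not part of this header: a caller about to
 * operate on an arbitrary pointer can probe its first byte and bail out
 * on failure, leaving the emitted report to describe the bad access;
 * the caller shown is hypothetical:
 *
 *	if (unlikely(!kasan_check_byte(ptr)))
 *		return 0;	// invalid access already reported
 */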

bool kasan_save_enable_multi_shot(void);
void kasan_restore_multi_shot(bool enabled);
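
/*
 * Illustrative sketch, not part of this header: tests that expect to
 * trigger several reports in a row can temporarily force multi-shot
 * reporting and restore the previous mode afterwards:
 *
 *	bool multishot = kasan_save_enable_multi_shot();
 *	... provoke one or more bad accesses ...
 *	kasan_restore_multi_shot(multishot);
 */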

#else /* CONFIG_KASAN */

static inline bool kasan_enabled(void)
{
	return false;
}
static inline slab_flags_t kasan_never_merge(void)
{
	return 0;
}
static inline void kasan_unpoison_range(const void *address, size_t size) {}
static inline void kasan_alloc_pages(struct page *page, unsigned int order) {}
static inline void kasan_free_pages(struct page *page, unsigned int order) {}
static inline void kasan_cache_create(struct kmem_cache *cache,
				      unsigned int *size,
				      slab_flags_t *flags) {}
static inline void kasan_cache_create_kmalloc(struct kmem_cache *cache) {}
static inline size_t kasan_metadata_size(struct kmem_cache *cache) { return 0; }
static inline void kasan_poison_slab(struct page *page) {}
static inline void kasan_unpoison_object_data(struct kmem_cache *cache,
					void *object) {}
static inline void kasan_poison_object_data(struct kmem_cache *cache,
					void *object) {}
static inline void *kasan_init_slab_obj(struct kmem_cache *cache,
				const void *object)
{
	return (void *)object;
}
static inline bool kasan_slab_free(struct kmem_cache *s, void *object)
{
	return false;
}
static inline void kasan_slab_free_mempool(void *ptr) {}
static inline void *kasan_slab_alloc(struct kmem_cache *s, void *object,
				   gfp_t flags)
{
	return object;
}
static inline void *kasan_kmalloc(struct kmem_cache *s, const void *object,
				size_t size, gfp_t flags)
{
	return (void *)object;
}
static inline void *kasan_kmalloc_large(const void *ptr, size_t size, gfp_t flags)
{
	return (void *)ptr;
}
static inline void *kasan_krealloc(const void *object, size_t new_size,
				 gfp_t flags)
{
	return (void *)object;
}
static inline void kasan_kfree_large(void *ptr) {}
static inline bool kasan_check_byte(const void *address)
{
	return true;
}

#endif /* CONFIG_KASAN */

#if defined(CONFIG_KASAN) && defined(CONFIG_KASAN_STACK)
void kasan_unpoison_task_stack(struct task_struct *task);
#else
static inline void kasan_unpoison_task_stack(struct task_struct *task) {}
#endif

#ifdef CONFIG_KASAN_GENERIC

void kasan_cache_shrink(struct kmem_cache *cache);
void kasan_cache_shutdown(struct kmem_cache *cache);
void kasan_record_aux_stack(void *ptr);

#else /* CONFIG_KASAN_GENERIC */

static inline void kasan_cache_shrink(struct kmem_cache *cache) {}
static inline void kasan_cache_shutdown(struct kmem_cache *cache) {}
static inline void kasan_record_aux_stack(void *ptr) {}

#endif /* CONFIG_KASAN_GENERIC */

#if defined(CONFIG_KASAN_SW_TAGS) || defined(CONFIG_KASAN_HW_TAGS)

static inline void *kasan_reset_tag(const void *addr)
{
	return (void *)arch_kasan_reset_tag(addr);
}
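
/*
 * Illustrative sketch, not part of this header: in the tag-based modes
 * the top byte of a pointer carries a tag, so pointers must be stripped
 * before any comparison or arithmetic that should ignore tags:
 *
 *	if (kasan_reset_tag(a) == kasan_reset_tag(b))
 *		... same underlying address ...
 */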

/**
 * kasan_report - print a report about a bad memory access detected by KASAN
 * @addr: address of the bad access
 * @size: size of the bad access
 * @is_write: whether the bad access is a write or a read
 * @ip: instruction pointer for the accessibility check or the bad access itself
 */
bool kasan_report(unsigned long addr, size_t size,
		bool is_write, unsigned long ip);

#else /* CONFIG_KASAN_SW_TAGS || CONFIG_KASAN_HW_TAGS */

static inline void *kasan_reset_tag(const void *addr)
{
	return (void *)addr;
}

#endif /* CONFIG_KASAN_SW_TAGS || CONFIG_KASAN_HW_TAGS */

#ifdef CONFIG_KASAN_SW_TAGS
void __init kasan_init_sw_tags(void);
#else
static inline void kasan_init_sw_tags(void) { }
#endif

#ifdef CONFIG_KASAN_HW_TAGS
void kasan_init_hw_tags_cpu(void);
void __init kasan_init_hw_tags(void);
#else
static inline void kasan_init_hw_tags_cpu(void) { }
static inline void kasan_init_hw_tags(void) { }
#endif

#ifdef CONFIG_KASAN_VMALLOC

int kasan_populate_vmalloc(unsigned long addr, unsigned long size);
void kasan_poison_vmalloc(const void *start, unsigned long size);
void kasan_unpoison_vmalloc(const void *start, unsigned long size);
void kasan_release_vmalloc(unsigned long start, unsigned long end,
			   unsigned long free_region_start,
			   unsigned long free_region_end);
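
/*
 * Illustrative sketch, not part of this header: a vmalloc-style
 * allocator first backs the virtual range with real shadow memory, then
 * unpoisons exactly the size it mapped; the error handling shown is an
 * assumption for illustration:
 *
 *	if (kasan_populate_vmalloc(addr, size))
 *		return NULL;	// no memory for shadow
 *	kasan_unpoison_vmalloc((void *)addr, size);
 */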

#else /* CONFIG_KASAN_VMALLOC */

static inline int kasan_populate_vmalloc(unsigned long start,
					unsigned long size)
{
	return 0;
}

static inline void kasan_poison_vmalloc(const void *start, unsigned long size)
{ }
static inline void kasan_unpoison_vmalloc(const void *start, unsigned long size)
{ }
static inline void kasan_release_vmalloc(unsigned long start,
					 unsigned long end,
					 unsigned long free_region_start,
					 unsigned long free_region_end) {}

#endif /* CONFIG_KASAN_VMALLOC */

#if (defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)) && \
	!defined(CONFIG_KASAN_VMALLOC)

/*
 * These functions provide a special case to support backing module
 * allocations with real shadow memory. With KASAN vmalloc, the special
 * case is unnecessary, as the work is handled in the generic case.
 */
int kasan_module_alloc(void *addr, size_t size);
void kasan_free_shadow(const struct vm_struct *vm);
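
/*
 * Illustrative sketch, not part of this header: an arch module_alloc()
 * implementation typically frees the just-created mapping when shadow
 * allocation fails; this calling pattern is an assumption here:
 *
 *	p = __vmalloc_node_range(...);
 *	if (p && kasan_module_alloc(p, size)) {
 *		vfree(p);
 *		return NULL;
 *	}
 */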

#else /* (CONFIG_KASAN_GENERIC || CONFIG_KASAN_SW_TAGS) && !CONFIG_KASAN_VMALLOC */

static inline int kasan_module_alloc(void *addr, size_t size) { return 0; }
static inline void kasan_free_shadow(const struct vm_struct *vm) {}

#endif /* (CONFIG_KASAN_GENERIC || CONFIG_KASAN_SW_TAGS) && !CONFIG_KASAN_VMALLOC */

#ifdef CONFIG_KASAN_INLINE
void kasan_non_canonical_hook(unsigned long addr);
#else /* CONFIG_KASAN_INLINE */
static inline void kasan_non_canonical_hook(unsigned long addr) { }
#endif /* CONFIG_KASAN_INLINE */

#endif /* _LINUX_KASAN_H */