/* SPDX-License-Identifier: GPL-2.0 */

#ifndef _KERNEL_KCSAN_ATOMIC_H
#define _KERNEL_KCSAN_ATOMIC_H

#include <linux/types.h>

/*
 * Special rules for certain memory where concurrent conflicting accesses are
 * common, however, the current convention is to not mark them; returns true if
 * access to @ptr should be considered atomic. Called from slow-path.
 *
 * No special cases are currently defined, so every access is reported to the
 * normal race-detection logic.
 */
static inline bool kcsan_is_atomic_special(const volatile void *ptr)
{
	/* No exempt memory regions defined; never treat @ptr as atomic. */
	return false;
}

#endif /* _KERNEL_KCSAN_ATOMIC_H */