/* SPDX-License-Identifier: GPL-2.0 */
#include <stdlib.h>
#if defined(__i386__) || defined(__x86_64__)
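/*
 * Compiler-only barrier: the empty asm with a "memory" clobber stops the
 * compiler from reordering or caching memory accesses across this point,
 * but emits no CPU fence instruction.
 */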
#define barrier() asm volatile("" ::: "memory")
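/*
 * On x86 the hardware memory model already keeps loads ordered with loads
 * and stores ordered with stores, so virt_rmb()/virt_wmb() only need to be
 * compiler barriers; only the full virt_mb() needs a real fence, provided
 * here by __sync_synchronize().
 */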
#define virt_mb() __sync_synchronize()
#define virt_rmb() barrier()
#define virt_wmb() barrier()
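/*
 * virt_store_mb(var, value): store "value" into "var" and act as a full
 * memory barrier.
 */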
/* Atomic store should be enough, but gcc generates worse code in that case. */
#define virt_store_mb(var, value) do { \
	typeof(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0)
/* Only the weak virt_* barriers should be used; reaching one of these strong barriers is a bug. */
# define mb() abort()
# define dma_rmb() abort()
# define dma_wmb() abort()
#else
#error Please fill in barrier macros
#endif
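
/*
 * Usage sketch (illustrative only, not part of the original header): a
 * minimal single-producer/single-consumer pairing of virt_wmb() and
 * virt_rmb().  The example_ring structure and function names are
 * hypothetical; compile with BARRIER_USAGE_EXAMPLE defined to include it.
 */
#ifdef BARRIER_USAGE_EXAMPLE
struct example_ring {
	unsigned int idx;	/* next slot to fill, written only by the producer */
	int data[16];
};

static inline void example_produce(struct example_ring *r, int v)
{
	r->data[r->idx % 16] = v;
	virt_wmb();		/* publish the data before the index update */
	r->idx++;
}

static inline int example_consume(struct example_ring *r, unsigned int i)
{
	while (r->idx == i)
		barrier();	/* force idx to be re-read on each poll */
	virt_rmb();		/* order the data read after the index read */
	return r->data[i % 16];
}
#endif /* BARRIER_USAGE_EXAMPLE */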