#include <stdlib.h>	/* for abort() */

#if defined(__i386__) || defined(__x86_64__)
/* Compiler-only barrier: stops the compiler from reordering across it. */
#define barrier() asm volatile("" ::: "memory")
/*
 * Weak barriers, sufficient for ordering against the other side of a
 * virtio ring.  On x86 only store-load reordering can happen in hardware,
 * so the full barrier needs a real fence while the read/write barriers
 * only need to constrain the compiler.
 */
#define virt_mb() __sync_synchronize()
#define virt_rmb() barrier()
#define virt_wmb() barrier()
/* Atomic store should be enough, but gcc generates worse code in that case. */
#define virt_store_mb(var, value) do { \
	typeof(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0)
/* Callers should use the weak (virt_*) barriers; reaching these is a bug. */
# define mb() abort()
# define rmb() abort()
# define wmb() abort()
#else
#error Please fill in barrier macros
#endif
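
/*
 * Illustrative sketch only, not part of the original header: one way the
 * virt_* macros might pair up in a toy producer.  The ring layout and the
 * names demo_ring, demo_publish and demo_publish_and_kick are hypothetical,
 * invented for this example.
 */
struct demo_ring {
	unsigned short idx;		/* index published to the consumer */
	unsigned int slots[16];		/* payload slots */
};

static inline void demo_publish(struct demo_ring *r, unsigned int val)
{
	r->slots[r->idx % 16] = val;	/* write the payload first */
	virt_wmb();			/* order the payload before the index */
	r->idx++;			/* consumer may now observe the entry */
}

static inline void demo_publish_and_kick(struct demo_ring *r, unsigned int val)
{
	r->slots[r->idx % 16] = val;	/* write the payload first */
	/*
	 * Store the new index and follow it with a full barrier, e.g. before
	 * reading a "does the other side need a notification?" flag.
	 */
	virt_store_mb(r->idx, (unsigned short)(r->idx + 1));
}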