blob: 04d563fc9b95ca0e80d323e271473492400f7273 [file] [log] [blame]
/* SPDX-License-Identifier: GPL-2.0 */
#include <stdlib.h>
#if defined(__i386__) || defined(__x86_64__)
/*
 * Compiler-only barrier: stops the compiler from reordering memory
 * accesses across it.  Emits no instruction — it is not a CPU fence.
 * (__asm__ spelling so strict ISO modes, e.g. -std=c11, accept it.)
 */
#define barrier() __asm__ volatile("" ::: "memory")
/*
 * x86 is strongly ordered between CPUs, so the virtio "weak" read/write
 * barriers only need to defeat compiler reordering; only the full
 * barrier needs a real fence (__sync_synchronize emits one).
 */
#define virt_mb() __sync_synchronize()
#define virt_rmb() barrier()
#define virt_wmb() barrier()
/* Atomic store should be enough, but gcc generates worse code in that case. */
#define virt_store_mb(var, value) do { \
	__typeof__(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0)
/* Weak barriers should be used. If not - it's a bug */
# define mb() abort()
# define dma_rmb() abort()
# define dma_wmb() abort()
#else
#error Please fill in barrier macros
#endif
22