/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_UM_BARRIER_H_
#define _ASM_UM_BARRIER_H_

#include <asm/cpufeatures.h>
#include <asm/alternative.h>

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */
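/*
 * A minimal illustrative sketch (not part of this header; desc and
 * dev->doorbell are hypothetical): a driver that fills a DMA descriptor
 * and then rings a device doorbell needs wmb() between the two stores,
 * even on a uniprocessor:
 *
 *	desc->addr = dma_addr;		// publish the descriptor
 *	wmb();				// descriptor visible before doorbell
 *	writel(1, dev->doorbell);	// device may now fetch it
 */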
#ifdef CONFIG_X86_32

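/*
 * Not every 32-bit CPU has the SSE/SSE2 fence instructions. The
 * alternative() macro therefore emits a lock-prefixed add to the top of
 * the stack, which acts as a full barrier on x86, and patches in the
 * fence instruction at boot when the X86_FEATURE_XMM/XMM2 bit is set.
 */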
#define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
#define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
#define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)

#else /* CONFIG_X86_32 */

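/*
 * Every 64-bit x86 CPU implements SSE2, so the fence instructions can
 * be used unconditionally.
 */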
#define mb() asm volatile("mfence" : : : "memory")
#define rmb() asm volatile("lfence" : : : "memory")
#define wmb() asm volatile("sfence" : : : "memory")

#endif /* CONFIG_X86_32 */

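/*
 * asm-generic/barrier.h provides default definitions, in terms of the
 * primitives above, for the barrier macros not defined here (e.g. the
 * smp_*() and virt_*() variants).
 */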
#include <asm-generic/barrier.h>

#endif