/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_UM_BARRIER_H_
#define _ASM_UM_BARRIER_H_

#include <asm/alternative.h>

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */
#ifdef CONFIG_X86_32

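/*
 * alternative() patches these at boot: CPUs that lack the fence
 * instructions keep the "lock; addl $0,0(%%esp)" fallback, a locked
 * no-op add to the stack that acts as a full barrier, while CPUs
 * advertising SSE (X86_FEATURE_XMM, which added sfence) or SSE2
 * (X86_FEATURE_XMM2, which added lfence/mfence) get the real fences.
 */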
#define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
#define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
#define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)

#else /* CONFIG_X86_32 */

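/*
 * On 64-bit, SSE2 is architecturally guaranteed, so the fence
 * instructions are used unconditionally.  The "memory" clobber keeps
 * the compiler from reordering memory accesses across the barrier.
 */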
#define mb() asm volatile("mfence" : : : "memory")
#define rmb() asm volatile("lfence" : : : "memory")
#define wmb() asm volatile("sfence" : : : "memory")

#endif /* CONFIG_X86_32 */

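/*
 * asm-generic/barrier.h fills in defaults (smp_mb() and friends) in
 * terms of the mb()/rmb()/wmb() definitions above.
 */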
#include <asm-generic/barrier.h>

#endif
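
/*
 * Illustrative usage sketch, not part of this header: the classic
 * producer/consumer pairing these barriers exist for.  "buf" and its
 * fields are hypothetical names.
 *
 * Producer: publish the data, then the ready flag, with wmb()
 * ensuring the other side never observes the flag before the data:
 *
 *	buf->data = value;
 *	wmb();
 *	buf->ready = 1;
 *
 * Consumer: pair it with rmb() so the data read cannot be satisfied
 * before the flag read:
 *
 *	while (!buf->ready)
 *		;
 *	rmb();
 *	use(buf->data);
 */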