/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_CMPXCHG_XCHG_H
#define __ASM_SH_CMPXCHG_XCHG_H

/*
 * Copyright (C) 2016 Red Hat, Inc.
 * Author: Michael S. Tsirkin <mst@redhat.com>
 */
#include <linux/bits.h>
#include <linux/compiler.h>
#include <asm/byteorder.h>

/*
 * Portable implementations of 1 and 2 byte xchg using a 4 byte cmpxchg.
 * Note: this header isn't self-contained: before including it, __cmpxchg_u32
 * must be defined first.
 */
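/*
 * Worked example of the arithmetic below: exchanging a u8 that sits at
 * byte offset 1 of its aligned word on a little-endian CPU gives
 * off = 1, bitoff = 8 and bitmask = 0x0000ff00, so the cmpxchg loop
 * rewrites only byte 1 of the containing 32-bit word; on a big-endian
 * CPU the same pointer yields bitoff = 16 and bitmask = 0x00ff0000.
 */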
static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
{
	/* Byte offset of the 1- or 2-byte operand within its aligned word. */
	int off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *p = ptr - off;
#ifdef __BIG_ENDIAN
	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	/* Mask covering the operand's bits inside the containing u32. */
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 oldv, newv;
	u32 ret;

	/*
	 * Read the whole word, splice the new value into the masked field
	 * and retry until the 4-byte cmpxchg confirms that no other writer
	 * touched the word in between.
	 */
	do {
		oldv = READ_ONCE(*p);
		ret = (oldv & bitmask) >> bitoff;
		newv = (oldv & ~bitmask) | (x << bitoff);
	} while (__cmpxchg_u32(p, oldv, newv) != oldv);

	return ret;
}
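
/*
 * The retry loop above relies only on the usual compare-and-swap contract
 * of the arch-provided helper, roughly:
 *
 *	u32 __cmpxchg_u32(volatile u32 *p, u32 old, u32 new);
 *
 * i.e. atomically store @new to *p iff *p still equals @old, and return
 * the value actually observed in *p; the loop retries whenever that value
 * differs from @old.  (Prototype shown for illustration only; the
 * definition supplied by the including header may spell the types
 * slightly differently.)
 */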

static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}
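
/*
 * Typical use, as a sketch only: an arch's size-dispatching xchg()
 * wrapper (assumed here, not defined by this header) would route small
 * operands to these helpers, e.g.
 *
 *	switch (size) {
 *	case 1:
 *		return xchg_u8(ptr, x);
 *	case 2:
 *		return xchg_u16(ptr, x);
 *	}
 */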

#endif /* __ASM_SH_CMPXCHG_XCHG_H */