#ifndef __ASM_SH_CMPXCHG_XCHG_H
#define __ASM_SH_CMPXCHG_XCHG_H

/*
 * Copyright (C) 2016 Red Hat, Inc.
 * Author: Michael S. Tsirkin <mst@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See the
 * file "COPYING" in the main directory of this archive for more details.
 */
#include <linux/bits.h>
#include <linux/compiler.h>
#include <asm/byteorder.h>

/*
 * Portable implementations of 1- and 2-byte xchg using a 4-byte cmpxchg.
 * Note: this header isn't self-contained; __cmpxchg_u32 must be defined
 * before it is included.
 */
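/*
 * Worked example (illustrative, not part of the original header): on a
 * little-endian CPU, a u8 at byte offset 1 of its aligned u32 gives
 * off = 1, bitoff = 8 and bitmask = ((1 << 8) - 1) << 8 = 0x0000ff00,
 * so the retry loop below touches only those eight bits of the word.
 */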
static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
{
	/* Locate the aligned u32 that contains the 1- or 2-byte target. */
	int off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *p = ptr - off;
#ifdef __BIG_ENDIAN
	/* On big-endian, byte 0 of the word holds its most significant bits. */
	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	/* Mask covering the size bytes being exchanged within the word. */
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 oldv, newv;
	u32 ret;

	/*
	 * Splice the new value into a snapshot of the whole word, then
	 * retry until the 4-byte cmpxchg succeeds, i.e. until no other
	 * CPU has changed the word in the meantime.
	 */
	do {
		oldv = READ_ONCE(*p);
		ret = (oldv & bitmask) >> bitoff;
		newv = (oldv & ~bitmask) | (x << bitoff);
	} while (__cmpxchg_u32(p, oldv, newv) != oldv);

	return ret;
}

static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof *m);
}

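/*
 * Illustrative usage (a hypothetical caller, not part of the original
 * header): because xchg_u8() atomically stores a new byte and returns
 * the previous one, it can implement a minimal test-and-set flag. The
 * function name below is made up for this sketch.
 */
static inline int example_test_and_set_flag(volatile u8 *flag)
{
	/* 0 means the flag was clear and we claimed it; 1 means it was taken. */
	return xchg_u8(flag, 1);
}
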
#endif /* __ASM_SH_CMPXCHG_XCHG_H */