blob: 9a24510cd8c1848f4c655ffc9f57d0ca4e1f1f4c [file] [log] [blame]
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Generic UP xchg and cmpxchg using interrupt disablement. Does not
 * support SMP.
 */
6
Mathieu Desnoyers068fbad2008-02-07 00:16:07 -08007#ifndef __ASM_GENERIC_CMPXCHG_H
8#define __ASM_GENERIC_CMPXCHG_H
9
Mathieu Desnoyers068fbad2008-02-07 00:16:07 -080010#ifdef CONFIG_SMP
11#error "Cannot use generic cmpxchg on SMP"
12#endif
13
Paul Gortmaker80da6a42012-04-01 16:38:47 -040014#include <linux/types.h>
David Howellsb4816af2012-03-28 18:30:03 +010015#include <linux/irqflags.h>
16
#ifndef xchg

/*
 * This function is deliberately never defined anywhere, so you'll get
 * a linker error if something tries to do an invalidly-sized xchg()
 * (any size other than the cases handled in __xchg() below).
 */
extern void __xchg_called_with_bad_pointer(void);
24
25static inline
26unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
27{
28 unsigned long ret, flags;
29
30 switch (size) {
31 case 1:
32#ifdef __xchg_u8
33 return __xchg_u8(x, ptr);
34#else
35 local_irq_save(flags);
36 ret = *(volatile u8 *)ptr;
37 *(volatile u8 *)ptr = x;
38 local_irq_restore(flags);
39 return ret;
40#endif /* __xchg_u8 */
41
42 case 2:
43#ifdef __xchg_u16
44 return __xchg_u16(x, ptr);
45#else
46 local_irq_save(flags);
47 ret = *(volatile u16 *)ptr;
48 *(volatile u16 *)ptr = x;
49 local_irq_restore(flags);
50 return ret;
51#endif /* __xchg_u16 */
52
53 case 4:
54#ifdef __xchg_u32
55 return __xchg_u32(x, ptr);
56#else
57 local_irq_save(flags);
58 ret = *(volatile u32 *)ptr;
59 *(volatile u32 *)ptr = x;
60 local_irq_restore(flags);
61 return ret;
62#endif /* __xchg_u32 */
63
64#ifdef CONFIG_64BIT
65 case 8:
66#ifdef __xchg_u64
67 return __xchg_u64(x, ptr);
68#else
69 local_irq_save(flags);
70 ret = *(volatile u64 *)ptr;
71 *(volatile u64 *)ptr = x;
72 local_irq_restore(flags);
73 return ret;
74#endif /* __xchg_u64 */
75#endif /* CONFIG_64BIT */
76
77 default:
78 __xchg_called_with_bad_pointer();
79 return x;
80 }
81}
82
/*
 * xchg() - atomically exchange *(ptr) with x, returning the old value
 * cast back to the pointee's type.  The operand size is taken from
 * sizeof(*(ptr)) and dispatched by __xchg() above.
 */
#define xchg(ptr, x) ({ \
	((__typeof__(*(ptr))) \
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))); \
})
David Howellsb4816af2012-03-28 18:30:03 +010087
88#endif /* xchg */
David Howells34484272012-03-28 18:30:03 +010089
/*
 * Atomic compare and exchange.
 */
David Howellsb4816af2012-03-28 18:30:03 +010093#include <asm-generic/cmpxchg-local.h>
94
#ifndef cmpxchg_local
/*
 * cmpxchg_local() - if *(ptr) equals o, store n; either way return the
 * value previously in *(ptr), cast to the pointee's type.  Used only
 * when the architecture did not provide its own cmpxchg_local; the
 * generic helper disables interrupts around the compare-and-store.
 */
#define cmpxchg_local(ptr, o, n) ({ \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr)))); \
})
#endif
101
#ifndef cmpxchg64_local
/* 64-bit counterpart of cmpxchg_local(); falls back to the generic helper. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
105
/*
 * On UP (the only configuration this header allows — see the
 * CONFIG_SMP #error above), the "local" variants are already atomic
 * with respect to everything that can interleave, so the SMP-safe
 * names can simply alias them.
 */
#define cmpxchg(ptr, o, n) cmpxchg_local((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
108
David Howellsb4816af2012-03-28 18:30:03 +0100109#endif /* __ASM_GENERIC_CMPXCHG_H */