/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Generic UP xchg and cmpxchg using interrupt disablement. Does not
 * support SMP.
 */

#ifndef __ASM_GENERIC_CMPXCHG_H
#define __ASM_GENERIC_CMPXCHG_H

#ifdef CONFIG_SMP
#error "Cannot use generic cmpxchg on SMP"
#endif

#include <linux/types.h>
#include <linux/irqflags.h>

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalidly-sized xchg().
 */
extern void __generic_xchg_called_with_bad_pointer(void);
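
/*
 * The fallbacks below implement xchg() for UP by disabling interrupts
 * around a plain load/store pair, which is atomic with respect to the
 * only CPU. An architecture may still provide its own __xchg_u8/u16/
 * u32/u64 helpers; the #ifdefs pick those up in preference.
 */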
static inline
unsigned long __generic_xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, flags;

	switch (size) {
	case 1:
#ifdef __xchg_u8
		return __xchg_u8(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u8 *)ptr;
		*(volatile u8 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u8 */

	case 2:
#ifdef __xchg_u16
		return __xchg_u16(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u16 *)ptr;
		*(volatile u16 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u16 */

	case 4:
#ifdef __xchg_u32
		return __xchg_u32(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u32 *)ptr;
		*(volatile u32 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u32 */

#ifdef CONFIG_64BIT
	case 8:
#ifdef __xchg_u64
		return __xchg_u64(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u64 *)ptr;
		*(volatile u64 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u64 */
#endif /* CONFIG_64BIT */

	default:
		__generic_xchg_called_with_bad_pointer();
		return x;
	}
}

#define generic_xchg(ptr, x) ({						\
	((__typeof__(*(ptr)))						\
	__generic_xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})
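
/*
 * Illustrative sketch only (not part of this header): with arch_xchg
 * resolving to generic_xchg below, a UP-only driver could atomically
 * take and clear a flag word through the usual xchg() wrapper. The
 * pending_flags and handle_flags names are hypothetical.
 *
 *	static unsigned int pending_flags;
 *
 *	void consume_pending(void)
 *	{
 *		unsigned int old = xchg(&pending_flags, 0);
 *
 *		if (old)
 *			handle_flags(old);
 *	}
 */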

/*
 * Atomic compare and exchange.
 */
#include <asm-generic/cmpxchg-local.h>

#define generic_cmpxchg_local(ptr, o, n) ({					\
	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))));			\
})
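
/*
 * Illustrative sketch only (not part of this header): the familiar
 * compare-and-swap retry loop, written against cmpxchg(), which maps
 * to generic_cmpxchg_local here via the arch_cmpxchg definitions
 * below. The counter and saturating_inc names are hypothetical.
 *
 *	static unsigned long counter;
 *
 *	void saturating_inc(void)
 *	{
 *		unsigned long old, new;
 *
 *		do {
 *			old = READ_ONCE(counter);
 *			new = (old == ULONG_MAX) ? old : old + 1;
 *		} while (cmpxchg(&counter, old, new) != old);
 *	}
 */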

#define generic_cmpxchg64_local(ptr, o, n)					\
	__generic_cmpxchg64_local((ptr), (o), (n))


#ifndef arch_xchg
#define arch_xchg generic_xchg
#endif

#ifndef arch_cmpxchg_local
#define arch_cmpxchg_local generic_cmpxchg_local
#endif

#ifndef arch_cmpxchg64_local
#define arch_cmpxchg64_local generic_cmpxchg64_local
#endif
#define arch_cmpxchg arch_cmpxchg_local
#define arch_cmpxchg64 arch_cmpxchg64_local

#endif /* __ASM_GENERIC_CMPXCHG_H */