/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Generic UP xchg and cmpxchg using interrupt disablement.  Does not
 * support SMP.
 */

#ifndef __ASM_GENERIC_CMPXCHG_H
#define __ASM_GENERIC_CMPXCHG_H

#ifdef CONFIG_SMP
#error "Cannot use generic cmpxchg on SMP"
#endif

#include <linux/types.h>
#include <linux/irqflags.h>

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalidly-sized xchg().
 */
extern void __generic_xchg_called_with_bad_pointer(void);

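/*
 * Exchange the value at *ptr with x and return the old value.  On UP,
 * atomicity with respect to interrupt handlers comes from disabling local
 * interrupts around the load/store pair; architectures may supply
 * optimised __xchg_uN() helpers, which are used instead when defined.
 */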
static inline
unsigned long __generic_xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, flags;

	switch (size) {
	case 1:
#ifdef __xchg_u8
		return __xchg_u8(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u8 *)ptr;
		*(volatile u8 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u8 */

	case 2:
#ifdef __xchg_u16
		return __xchg_u16(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u16 *)ptr;
		*(volatile u16 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u16 */

	case 4:
#ifdef __xchg_u32
		return __xchg_u32(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u32 *)ptr;
		*(volatile u32 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u32 */

#ifdef CONFIG_64BIT
	case 8:
#ifdef __xchg_u64
		return __xchg_u64(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u64 *)ptr;
		*(volatile u64 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u64 */
#endif /* CONFIG_64BIT */

	default:
		__generic_xchg_called_with_bad_pointer();
		return x;
	}
}

#define generic_xchg(ptr, x) ({						\
	((__typeof__(*(ptr)))						\
		__generic_xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))); \
})
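
/*
 * Illustrative use only (not part of this header): with the default
 * arch_xchg mapping below, a UP-only caller could swap in a new flag
 * value and observe the old one:
 *
 *	old = arch_xchg(&flag, 1);
 */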

/*
 * Atomic compare and exchange.
 */
#include <asm-generic/cmpxchg-local.h>

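/*
 * generic_cmpxchg_local() compares *ptr with o and, if they are equal,
 * stores n; the old value is returned either way.  The work is done by
 * __generic_cmpxchg_local() from <asm-generic/cmpxchg-local.h>, which
 * likewise relies on interrupt disablement rather than SMP-safe
 * primitives.
 */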
#define generic_cmpxchg_local(ptr, o, n) ({				       \
	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))));		       \
})

#define generic_cmpxchg64_local(ptr, o, n) \
	__generic_cmpxchg64_local((ptr), (o), (n))


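/*
 * Fall back to the UP generics for any operation the architecture did not
 * provide itself.  Since there is no SMP, the "full" cmpxchg variants can
 * simply reuse the _local implementations.
 */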
#ifndef arch_xchg
#define arch_xchg generic_xchg
#endif

#ifndef arch_cmpxchg_local
#define arch_cmpxchg_local generic_cmpxchg_local
#endif

#ifndef arch_cmpxchg64_local
#define arch_cmpxchg64_local generic_cmpxchg64_local
#endif

#define arch_cmpxchg		arch_cmpxchg_local
#define arch_cmpxchg64		arch_cmpxchg64_local

#endif /* __ASM_GENERIC_CMPXCHG_H */