/* SPDX-License-Identifier: GPL-2.0 */
#ifndef ASM_X86_CMPXCHG_H
#define ASM_X86_CMPXCHG_H

#include <linux/compiler.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * Non-existent functions to indicate usage errors at link time
 * (or at compile time if the compiler implements __compiletime_error()).
 */
extern void __xchg_wrong_size(void)
	__compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_wrong_size(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern void __xadd_wrong_size(void)
	__compiletime_error("Bad argument size for xadd");
extern void __add_wrong_size(void)
	__compiletime_error("Bad argument size for add");

/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making those switch
 * case statements guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B	1
#define __X86_CASE_W	2
#define __X86_CASE_L	4
#ifdef CONFIG_64BIT
#define __X86_CASE_Q	8
#else
#define __X86_CASE_Q	-1		/* sizeof will never return -1 */
#endif
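
/*
 * Illustrative note (not part of the original header): with these
 * constants, an unsupported operand size, say xchg() on a 16-byte
 * struct, matches none of the cases below, so the size switch falls
 * through to the default case and references one of the
 * __*_wrong_size() declarations above, turning the mistake into a
 * compile-time or link-time error instead of silently generating
 * wrong code.
 */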

/*
 * An exchange-type operation, which takes a value and a pointer, and
 * returns the old value.
 */
#define __xchg_op(ptr, arg, op, lock)					\
	({								\
		__typeof__ (*(ptr)) __ret = (arg);			\
		switch (sizeof(*(ptr))) {				\
		case __X86_CASE_B:					\
			asm volatile (lock #op "b %b0, %1\n"		\
				      : "+q" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_W:					\
			asm volatile (lock #op "w %w0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_L:					\
			asm volatile (lock #op "l %0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_Q:					\
			asm volatile (lock #op "q %q0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		default:						\
			__ ## op ## _wrong_size();			\
		}							\
		__ret;							\
	})

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define arch_xchg(ptr, v)	__xchg_op((ptr), (v), xchg, "")
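
/*
 * Illustrative sketch (not part of the original header): arch_xchg()
 * atomically stores the new value and returns the one previously held
 * in memory, so a hypothetical hand-off of a pending-work flag could
 * look like:
 *
 *	old = arch_xchg(&pending, 0);
 *	if (old)
 *		process_pending();	// hypothetical helper
 */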

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#ifdef CONFIG_X86_32
# include <asm/cmpxchg_32.h>
#else
# include <asm/cmpxchg_64.h>
#endif

#define arch_cmpxchg(ptr, old, new)					\
	__cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define arch_sync_cmpxchg(ptr, old, new)				\
	__sync_cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define arch_cmpxchg_local(ptr, old, new)				\
	__cmpxchg_local(ptr, old, new, sizeof(*(ptr)))

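/*
 * Illustrative sketch (not part of the original header): arch_cmpxchg()
 * returns the value found in memory; callers detect success by comparing
 * that return value with the expected old value, e.g. a hypothetical
 * one-shot claim of an owner slot:
 *
 *	if (arch_cmpxchg(&owner, NULL, me) == NULL)
 *		... slot was free and is now claimed by "me" ...
 */
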
#define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock)		\
({									\
	bool success;							\
	__typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);		\
	__typeof__(*(_ptr)) __old = *_old;				\
	__typeof__(*(_ptr)) __new = (_new);				\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(_ptr);		\
		asm volatile(lock "cmpxchgb %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "q" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(_ptr);		\
		asm volatile(lock "cmpxchgw %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "r" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(_ptr);		\
		asm volatile(lock "cmpxchgl %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "r" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(_ptr);		\
		asm volatile(lock "cmpxchgq %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "r" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	if (unlikely(!success))						\
		*_old = __old;						\
	likely(success);						\
})

#define __try_cmpxchg(ptr, pold, new, size)				\
	__raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)

#define try_cmpxchg(ptr, pold, new)					\
	__try_cmpxchg((ptr), (pold), (new), sizeof(*(ptr)))

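/*
 * Illustrative sketch (not part of the original header): try_cmpxchg()
 * returns true on success and, on failure, writes the value actually
 * found in memory back through "pold", so a retry loop does not need to
 * re-read the location itself. A hypothetical lock-free update could
 * look like:
 *
 *	old = READ_ONCE(*ptr);
 *	do {
 *		new = old | SOME_FLAG;	// SOME_FLAG is hypothetical
 *	} while (!try_cmpxchg(ptr, &old, new));
 */
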
/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
 * value of "*ptr".
 *
 * xadd() is locked when multiple CPUs are online
 */
#define __xadd(ptr, inc, lock)	__xchg_op((ptr), (inc), xadd, lock)
#define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
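
/*
 * Illustrative sketch (not part of the original header): because xadd()
 * returns the value *ptr held before the addition, it can hand out
 * unique tickets, e.g. with a hypothetical counter:
 *
 *	my_ticket = xadd(&next_ticket, 1);
 */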

#define __cmpxchg_double(pfx, p1, p2, o1, o2, n1, n2)			\
({									\
	bool __ret;							\
	__typeof__(*(p1)) __old1 = (o1), __new1 = (n1);			\
	__typeof__(*(p2)) __old2 = (o2), __new2 = (n2);			\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)(p1) % (2 * sizeof(long)));		\
	VM_BUG_ON((unsigned long)((p1) + 1) != (unsigned long)(p2));	\
	asm volatile(pfx "cmpxchg%c4b %2; sete %0"			\
		     : "=a" (__ret), "+d" (__old2),			\
		       "+m" (*(p1)), "+m" (*(p2))			\
		     : "i" (2 * sizeof(long)), "a" (__old1),		\
		       "b" (__new1), "c" (__new2));			\
	__ret;								\
})

#define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)

#define arch_cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double(, p1, p2, o1, o2, n1, n2)
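
/*
 * Illustrative sketch (not part of the original header): the double
 * variants operate on two adjacent long-sized words whose combined
 * location is aligned to 2 * sizeof(long), e.g. a hypothetical
 * pointer-plus-sequence pair:
 *
 *	struct { void *ptr; unsigned long seq; } d;	// suitably aligned
 *
 *	ok = arch_cmpxchg_double(&d.ptr, &d.seq,
 *				 old_ptr, old_seq, new_ptr, new_seq);
 */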

#endif	/* ASM_X86_CMPXCHG_H */