/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
				   int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
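/*
 * Example usage (a minimal illustrative sketch, not part of the interface
 * above; all names here are hypothetical): xchg() atomically stores a new
 * value and returns the old one, so a pending-work word can be claimed and
 * cleared in a single step without racing against other CPUs.
 */
static inline unsigned int example_claim_pending(unsigned int *pending)
{
	/* Swap in 0 and return whatever bits were pending before the swap. */
	return xchg(pending, 0);
}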

#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: __LLSC_CLOBBER);					\
		loongson_llsc_mb();					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})
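/*
 * Example usage (a minimal illustrative sketch, not part of the interface
 * above; all names here are hypothetical): the classic cmpxchg() retry loop.
 * The update is recomputed and retried whenever another CPU changed the
 * value between the read and the cmpxchg().
 */
static inline unsigned int example_add_capped(unsigned int *counter,
					      unsigned int max)
{
	unsigned int old, new;

	do {
		old = *counter;
		if (old >= max)
			return old;	/* already at the ceiling */
		new = old + 1;
		/* cmpxchg() returns the value it found; retry on mismatch. */
	} while (cmpxchg(counter, old, new) != old);

	return new;
}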

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	loongson_llsc_mb();
	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
# if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
# else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
# endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __scbeqz "	%L1, 1b				\n"
	"	.set	pop				\n"
	"2:						\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");
	loongson_llsc_mb();

	local_irq_restore(flags);
	return ret;
}

# define cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported	\
	 * will cause a build error unless cpu_has_64bits is a		\
	 * compile-time constant 1.					\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc) {			\
		smp_mb__before_llsc();					\
		__res = __cmpxchg64((ptr), __old, __new);		\
		smp_llsc_mb();						\
	} else {							\
		__res = __cmpxchg64_unsupported();			\
	}								\
									\
	__res;								\
})

# else /* !CONFIG_SMP */
# define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */
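/*
 * Example usage (a minimal illustrative sketch, not part of the interface
 * above; all names here are hypothetical): advancing a 64-bit generation
 * counter. On 64-bit kernels cmpxchg64() is plain cmpxchg(); on 32-bit
 * kernels it uses the lld/scd or generic fallback paths defined above.
 */
static inline unsigned long long example_bump_generation(unsigned long long *gen)
{
	unsigned long long old, new;

	do {
		old = *gen;
		new = old + 1;
	} while (cmpxchg64(gen, old, new) != old);

	return new;
}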

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */