/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */

#ifndef __ASM_ATOMIC_LSE_H
#define __ASM_ATOMIC_LSE_H

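/*
 * Atomic operations implemented using the ARMv8.1 LSE (Large System
 * Extensions) atomic instructions, each of which performs its
 * read-modify-write as a single instruction instead of an LL/SC loop.
 *
 * ATOMIC_OP() generates the ops that do not return a value: the ST<OP>
 * forms (stadd, stclr, steor, stset) update memory without reading the
 * old value back, so they need no ordering suffix or clobbers.
 */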
#define ATOMIC_OP(op, asm_op)						\
static inline void __lse_atomic_##op(int i, atomic_t *v)		\
{									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op "	%w[i], %[v]\n"				\
	: [v] "+Q" (v->counter)						\
	: [i] "r" (i));							\
}

ATOMIC_OP(andnot, stclr)
ATOMIC_OP(or, stset)
ATOMIC_OP(xor, steor)
ATOMIC_OP(add, stadd)

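/*
 * There is no LSE subtract instruction; subtraction is an atomic add
 * of the negated operand.
 */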
static inline void __lse_atomic_sub(int i, atomic_t *v)
{
	__lse_atomic_add(-i, v);
}

#undef ATOMIC_OP

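/*
 * ATOMIC_FETCH_OP() generates the value-returning ops from the LD<OP>
 * instructions, which write the old value of the memory location to a
 * destination register. The "mb" suffix selects the ordering: none for
 * relaxed, "a" for acquire, "l" for release and "al" for fully ordered;
 * the ordered variants also clobber "memory" so that they act as
 * compiler barriers.
 */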
#define ATOMIC_FETCH_OP(name, mb, op, asm_op, cl...)			\
static inline int __lse_atomic_fetch_##op##name(int i, atomic_t *v)	\
{									\
	int old;							\
									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op #mb "	%w[i], %w[old], %[v]"			\
	: [v] "+Q" (v->counter),					\
	  [old] "=r" (old)						\
	: [i] "r" (i)							\
	: cl);								\
									\
	return old;							\
}

#define ATOMIC_FETCH_OPS(op, asm_op)					\
	ATOMIC_FETCH_OP(_relaxed,   , op, asm_op)			\
	ATOMIC_FETCH_OP(_acquire,  a, op, asm_op, "memory")		\
	ATOMIC_FETCH_OP(_release,  l, op, asm_op, "memory")		\
	ATOMIC_FETCH_OP(        , al, op, asm_op, "memory")

ATOMIC_FETCH_OPS(andnot, ldclr)
ATOMIC_FETCH_OPS(or, ldset)
ATOMIC_FETCH_OPS(xor, ldeor)
ATOMIC_FETCH_OPS(add, ldadd)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_FETCH_OPS

#define ATOMIC_FETCH_OP_SUB(name)					\
static inline int __lse_atomic_fetch_sub##name(int i, atomic_t *v)	\
{									\
	return __lse_atomic_fetch_add##name(-i, v);			\
}

ATOMIC_FETCH_OP_SUB(_relaxed)
ATOMIC_FETCH_OP_SUB(_acquire)
ATOMIC_FETCH_OP_SUB(_release)
ATOMIC_FETCH_OP_SUB(        )

#undef ATOMIC_FETCH_OP_SUB

#define ATOMIC_OP_ADD_SUB_RETURN(name)					\
static inline int __lse_atomic_add_return##name(int i, atomic_t *v)	\
{									\
	return __lse_atomic_fetch_add##name(i, v) + i;			\
}									\
									\
static inline int __lse_atomic_sub_return##name(int i, atomic_t *v)	\
{									\
	return __lse_atomic_fetch_sub##name(i, v) - i;			\
}

ATOMIC_OP_ADD_SUB_RETURN(_relaxed)
ATOMIC_OP_ADD_SUB_RETURN(_acquire)
ATOMIC_OP_ADD_SUB_RETURN(_release)
ATOMIC_OP_ADD_SUB_RETURN(        )

#undef ATOMIC_OP_ADD_SUB_RETURN

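/*
 * LSE has no atomic AND instruction; AND is implemented as an atomic
 * bit-clear (andnot) of the complemented mask, since
 * (v & i) == (v & ~(~i)).
 */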
static inline void __lse_atomic_and(int i, atomic_t *v)
{
	__lse_atomic_andnot(~i, v);
}

#define ATOMIC_FETCH_OP_AND(name, mb, cl...)				\
static inline int __lse_atomic_fetch_and##name(int i, atomic_t *v)	\
{									\
	return __lse_atomic_fetch_andnot##name(~i, v);			\
}

ATOMIC_FETCH_OP_AND(_relaxed,   )
ATOMIC_FETCH_OP_AND(_acquire,  a, "memory")
ATOMIC_FETCH_OP_AND(_release,  l, "memory")
ATOMIC_FETCH_OP_AND(        , al, "memory")

#undef ATOMIC_FETCH_OP_AND

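/*
 * The 64-bit ops below mirror the 32-bit ones above, operating on s64
 * values in full X registers (no %w operand modifier).
 */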
#define ATOMIC64_OP(op, asm_op)						\
static inline void __lse_atomic64_##op(s64 i, atomic64_t *v)		\
{									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op "	%[i], %[v]\n"				\
	: [v] "+Q" (v->counter)						\
	: [i] "r" (i));							\
}

ATOMIC64_OP(andnot, stclr)
ATOMIC64_OP(or, stset)
ATOMIC64_OP(xor, steor)
ATOMIC64_OP(add, stadd)

static inline void __lse_atomic64_sub(s64 i, atomic64_t *v)
{
	__lse_atomic64_add(-i, v);
}

#undef ATOMIC64_OP

#define ATOMIC64_FETCH_OP(name, mb, op, asm_op, cl...)			\
static inline long __lse_atomic64_fetch_##op##name(s64 i, atomic64_t *v)\
{									\
	s64 old;							\
									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	" #asm_op #mb "	%[i], %[old], %[v]"			\
	: [v] "+Q" (v->counter),					\
	  [old] "=r" (old)						\
	: [i] "r" (i)							\
	: cl);								\
									\
	return old;							\
}

#define ATOMIC64_FETCH_OPS(op, asm_op)					\
	ATOMIC64_FETCH_OP(_relaxed,   , op, asm_op)			\
	ATOMIC64_FETCH_OP(_acquire,  a, op, asm_op, "memory")		\
	ATOMIC64_FETCH_OP(_release,  l, op, asm_op, "memory")		\
	ATOMIC64_FETCH_OP(        , al, op, asm_op, "memory")

ATOMIC64_FETCH_OPS(andnot, ldclr)
ATOMIC64_FETCH_OPS(or, ldset)
ATOMIC64_FETCH_OPS(xor, ldeor)
ATOMIC64_FETCH_OPS(add, ldadd)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_FETCH_OPS

#define ATOMIC64_FETCH_OP_SUB(name)					\
static inline long __lse_atomic64_fetch_sub##name(s64 i, atomic64_t *v)\
{									\
	return __lse_atomic64_fetch_add##name(-i, v);			\
}

ATOMIC64_FETCH_OP_SUB(_relaxed)
ATOMIC64_FETCH_OP_SUB(_acquire)
ATOMIC64_FETCH_OP_SUB(_release)
ATOMIC64_FETCH_OP_SUB(        )

#undef ATOMIC64_FETCH_OP_SUB

#define ATOMIC64_OP_ADD_SUB_RETURN(name)				\
static inline long __lse_atomic64_add_return##name(s64 i, atomic64_t *v)\
{									\
	return __lse_atomic64_fetch_add##name(i, v) + i;		\
}									\
									\
static inline long __lse_atomic64_sub_return##name(s64 i, atomic64_t *v)\
{									\
	return __lse_atomic64_fetch_sub##name(i, v) - i;		\
}

ATOMIC64_OP_ADD_SUB_RETURN(_relaxed)
ATOMIC64_OP_ADD_SUB_RETURN(_acquire)
ATOMIC64_OP_ADD_SUB_RETURN(_release)
ATOMIC64_OP_ADD_SUB_RETURN(        )

#undef ATOMIC64_OP_ADD_SUB_RETURN

static inline void __lse_atomic64_and(s64 i, atomic64_t *v)
{
	__lse_atomic64_andnot(~i, v);
}

#define ATOMIC64_FETCH_OP_AND(name, mb, cl...)				\
static inline long __lse_atomic64_fetch_and##name(s64 i, atomic64_t *v)\
{									\
	return __lse_atomic64_fetch_andnot##name(~i, v);		\
}

ATOMIC64_FETCH_OP_AND(_relaxed,   )
ATOMIC64_FETCH_OP_AND(_acquire,  a, "memory")
ATOMIC64_FETCH_OP_AND(_release,  l, "memory")
ATOMIC64_FETCH_OP_AND(        , al, "memory")

#undef ATOMIC64_FETCH_OP_AND

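/*
 * There is no LSE instruction for dec_if_positive, so it is built as a
 * CAS loop: load the counter, compute the decrement, bail out if the
 * result would be negative, and retry whenever casal observes a value
 * different from the one just loaded.
 */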
static inline s64 __lse_atomic64_dec_if_positive(atomic64_t *v)
{
	unsigned long tmp;

	asm volatile(
	__LSE_PREAMBLE
	"1:	ldr	%x[tmp], %[v]\n"
	"	subs	%[ret], %x[tmp], #1\n"
	"	b.lt	2f\n"
	"	casal	%x[tmp], %[ret], %[v]\n"
	"	sub	%x[tmp], %x[tmp], #1\n"
	"	sub	%x[tmp], %x[tmp], %[ret]\n"
	"	cbnz	%x[tmp], 1b\n"
	"2:"
	: [ret] "+&r" (v), [v] "+Q" (v->counter), [tmp] "=&r" (tmp)
	:
	: "cc", "memory");

	return (long)v;
}

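/*
 * __CMPXCHG_CASE() wraps the CAS{B,H,} instructions to provide
 * 8/16/32/64-bit compare-and-swap. CAS uses one register both for the
 * expected value and for the old value it reads back, so %[old] is
 * first copied into a scratch register that the instruction may
 * overwrite. The value observed at *ptr is returned.
 */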
#define __CMPXCHG_CASE(w, sfx, name, sz, mb, cl...)			\
static __always_inline u##sz						\
__lse__cmpxchg_case_##name##sz(volatile void *ptr,			\
					      u##sz old,		\
					      u##sz new)		\
{									\
	register unsigned long x0 asm ("x0") = (unsigned long)ptr;	\
	register u##sz x1 asm ("x1") = old;				\
	register u##sz x2 asm ("x2") = new;				\
	unsigned long tmp;						\
									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	mov	%" #w "[tmp], %" #w "[old]\n"			\
	"	cas" #mb #sfx "\t%" #w "[tmp], %" #w "[new], %[v]\n"	\
	"	mov	%" #w "[ret], %" #w "[tmp]"			\
	: [ret] "+r" (x0), [v] "+Q" (*(u##sz *)ptr),			\
	  [tmp] "=&r" (tmp)						\
	: [old] "r" (x1), [new] "r" (x2)				\
	: cl);								\
									\
	return x0;							\
}

__CMPXCHG_CASE(w, b,     ,  8,   )
__CMPXCHG_CASE(w, h,     , 16,   )
__CMPXCHG_CASE(w,  ,     , 32,   )
__CMPXCHG_CASE(x,  ,     , 64,   )
__CMPXCHG_CASE(w, b, acq_,  8,  a, "memory")
__CMPXCHG_CASE(w, h, acq_, 16,  a, "memory")
__CMPXCHG_CASE(w,  , acq_, 32,  a, "memory")
__CMPXCHG_CASE(x,  , acq_, 64,  a, "memory")
__CMPXCHG_CASE(w, b, rel_,  8,  l, "memory")
__CMPXCHG_CASE(w, h, rel_, 16,  l, "memory")
__CMPXCHG_CASE(w,  , rel_, 32,  l, "memory")
__CMPXCHG_CASE(x,  , rel_, 64,  l, "memory")
__CMPXCHG_CASE(w, b, mb_,   8, al, "memory")
__CMPXCHG_CASE(w, h, mb_,  16, al, "memory")
__CMPXCHG_CASE(w,  , mb_,  32, al, "memory")
__CMPXCHG_CASE(x,  , mb_,  64, al, "memory")

#undef __CMPXCHG_CASE

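/*
 * __CMPXCHG_DBL() implements 128-bit compare-and-swap with CASP,
 * operating on a pair of consecutive 64-bit words. The trailing
 * eor/eor/orr sequence folds the comparison into a single result:
 * x0 is zero on success and non-zero if either word observed in
 * memory differed from its expected value.
 */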
#define __CMPXCHG_DBL(name, mb, cl...)					\
static __always_inline long						\
__lse__cmpxchg_double##name(unsigned long old1,				\
					 unsigned long old2,		\
					 unsigned long new1,		\
					 unsigned long new2,		\
					 volatile void *ptr)		\
{									\
	unsigned long oldval1 = old1;					\
	unsigned long oldval2 = old2;					\
	register unsigned long x0 asm ("x0") = old1;			\
	register unsigned long x1 asm ("x1") = old2;			\
	register unsigned long x2 asm ("x2") = new1;			\
	register unsigned long x3 asm ("x3") = new2;			\
	register unsigned long x4 asm ("x4") = (unsigned long)ptr;	\
									\
	asm volatile(							\
	__LSE_PREAMBLE							\
	"	casp" #mb "\t%[old1], %[old2], %[new1], %[new2], %[v]\n"\
	"	eor	%[old1], %[old1], %[oldval1]\n"			\
	"	eor	%[old2], %[old2], %[oldval2]\n"			\
	"	orr	%[old1], %[old1], %[old2]"			\
	: [old1] "+&r" (x0), [old2] "+&r" (x1),				\
	  [v] "+Q" (*(unsigned long *)ptr)				\
	: [new1] "r" (x2), [new2] "r" (x3), [ptr] "r" (x4),		\
	  [oldval1] "r" (oldval1), [oldval2] "r" (oldval2)		\
	: cl);								\
									\
	return x0;							\
}

__CMPXCHG_DBL(   ,   )
__CMPXCHG_DBL(_mb, al, "memory")

#undef __CMPXCHG_DBL

#endif	/* __ASM_ATOMIC_LSE_H */