/*
 * Based on arch/arm/include/asm/atomic.h
 *
 * Copyright (C) 1996 Russell King.
 * Copyright (C) 2002 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef __ASM_ATOMIC_LSE_H
#define __ASM_ATOMIC_LSE_H

#ifndef __ARM64_IN_ATOMIC_IMPL
#error "please don't include this file directly"
#endif

/* Move the ll/sc atomics out-of-line */
#define __LL_SC_INLINE
#define __LL_SC_PREFIX(x)	__ll_sc_##x
#define __LL_SC_EXPORT(x)	EXPORT_SYMBOL(__LL_SC_PREFIX(x))
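/*
 * For example, __LL_SC_PREFIX(atomic_add) expands to __ll_sc_atomic_add,
 * the symbol under which the out-of-line LL/SC variant of atomic_add() is
 * expected to be built and exported.
 */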

/* Macros for constructing calls to out-of-line ll/sc atomics */
#define __LL_SC_CALL(op)						\
	"bl\t" __stringify(__LL_SC_PREFIX(atomic_##op)) "\n"
#define __LL_SC_CALL64(op)						\
	"bl\t" __stringify(__LL_SC_PREFIX(atomic64_##op)) "\n"
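/*
 * For instance, __LL_SC_CALL(add) expands to the inline-asm string
 * "bl\t" "__ll_sc_atomic_add" "\n", i.e. a branch-and-link to the
 * out-of-line LL/SC routine.
 */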

#define ATOMIC_OP(op, asm_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	register int w0 asm ("w0") = i;					\
	register atomic_t *x1 asm ("x1") = v;				\
									\
	asm volatile(							\
	__LL_SC_CALL(op)						\
	: "+r" (w0), "+Q" (v->counter)					\
	: "r" (x1)							\
	: "x30");							\
}									\

#define ATOMIC_OP_RETURN(op, asm_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	register int w0 asm ("w0") = i;					\
	register atomic_t *x1 asm ("x1") = v;				\
									\
	asm volatile(							\
	__LL_SC_CALL(op##_return)					\
	: "+r" (w0)							\
	: "r" (x1)							\
	: "x30", "memory");						\
									\
	return w0;							\
}

#define ATOMIC_OPS(op, asm_op)						\
	ATOMIC_OP(op, asm_op)						\
	ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, sub)

ATOMIC_OP(and, and)
ATOMIC_OP(andnot, bic)
ATOMIC_OP(or, orr)
ATOMIC_OP(xor, eor)
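/*
 * As an example of the expansions above, ATOMIC_OPS(add, add) instantiates
 * atomic_add() and atomic_add_return(), which branch-and-link to
 * __ll_sc_atomic_add and __ll_sc_atomic_add_return respectively. The asm_op
 * argument is not referenced in this header; presumably it is kept so the
 * instantiation lines mirror the LL/SC versions.
 */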

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
{
	register unsigned long x0 asm ("x0") = (unsigned long)ptr;
	register int w1 asm ("w1") = old;
	register int w2 asm ("w2") = new;

	asm volatile(
	__LL_SC_CALL(cmpxchg)
	: "+r" (x0)
	: "r" (w1), "r" (w2)
	: "x30", "cc", "memory");

	return x0;
}
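/*
 * The out-of-line cmpxchg routine is expected to leave the value that was
 * previously held by *ptr in x0, which is what gets returned. A minimal
 * usage sketch (hypothetical caller, not part of this file):
 *
 *	int tmp, old = atomic_read(v);
 *
 *	while ((tmp = atomic_cmpxchg(v, old, old + 1)) != old)
 *		old = tmp;
 */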

#define ATOMIC64_OP(op, asm_op)						\
static inline void atomic64_##op(long i, atomic64_t *v)		\
{									\
	register long x0 asm ("x0") = i;				\
	register atomic64_t *x1 asm ("x1") = v;				\
									\
	asm volatile(							\
	__LL_SC_CALL64(op)						\
	: "+r" (x0), "+Q" (v->counter)					\
	: "r" (x1)							\
	: "x30");							\
}									\

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static inline long atomic64_##op##_return(long i, atomic64_t *v)	\
{									\
	register long x0 asm ("x0") = i;				\
	register atomic64_t *x1 asm ("x1") = v;				\
									\
	asm volatile(							\
	__LL_SC_CALL64(op##_return)					\
	: "+r" (x0)							\
	: "r" (x1)							\
	: "x30", "memory");						\
									\
	return x0;							\
}

#define ATOMIC64_OPS(op, asm_op)					\
	ATOMIC64_OP(op, asm_op)						\
	ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, sub)

ATOMIC64_OP(and, and)
ATOMIC64_OP(andnot, bic)
ATOMIC64_OP(or, orr)
ATOMIC64_OP(xor, eor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

static inline long atomic64_cmpxchg(atomic64_t *ptr, long old, long new)
{
	register unsigned long x0 asm ("x0") = (unsigned long)ptr;
	register long x1 asm ("x1") = old;
	register long x2 asm ("x2") = new;

	asm volatile(
	__LL_SC_CALL64(cmpxchg)
	: "+r" (x0)
	: "r" (x1), "r" (x2)
	: "x30", "cc", "memory");

	return x0;
}

static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	register unsigned long x0 asm ("x0") = (unsigned long)v;

	asm volatile(
	__LL_SC_CALL64(dec_if_positive)
	: "+r" (x0)
	:
	: "x30", "cc", "memory");

	return x0;
}
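/*
 * Expected semantics (implemented by the out-of-line routine): decrement
 * *v only if the result would remain non-negative, and return the old
 * value minus one either way, so a negative return value indicates that
 * the decrement was not performed.
 */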

#endif	/* __ASM_ATOMIC_LSE_H */