#ifndef _ASM_M32R_ATOMIC_H
#define _ASM_M32R_ATOMIC_H

/*
 * linux/include/asm-m32r/atomic.h
 *
 * M32R version:
 *   Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *   Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/types.h>
#include <asm/assembler.h>
#include <asm/cmpxchg.h>
#include <asm/dcache_clear.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */
#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	ACCESS_ONCE((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 *
 * ACCESS_ONCE() forbids the compiler from tearing, fusing or eliding
 * the store, making the writer side symmetric with atomic_read() above.
 */
#define atomic_set(v,i)	(ACCESS_ONCE((v)->counter) = (i))
/*
 * The inline asm bodies below invoke DCACHE_CLEAR(..., "r4", ...),
 * which scratches r4 in the CONFIG_CHIP_M32700_TS1 configuration
 * (errata workaround), so r4 must then appear in their clobber lists.
 */
#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif
/*
 * ATOMIC_OP(op) - generate "static void atomic_<op>(int i, atomic_t *v)",
 * which applies <op> to v->counter atomically.
 *
 * Atomicity against code on the local CPU comes from local_irq_save().
 * M32R_LOCK/M32R_UNLOCK come from <asm/assembler.h>; presumably they
 * form a locked load/store pair for SMP -- confirm against assembler.h.
 * DCACHE_CLEAR is an errata workaround that may scratch r4, hence
 * __ATOMIC_CLOBBER in the clobber list.
 */
#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}
/*
 * ATOMIC_OP_RETURN(op) - generate
 * "static int atomic_<op>_return(int i, atomic_t *v)", which applies
 * <op> to v->counter atomically and returns the resulting (new) value.
 *
 * Same scheme as ATOMIC_OP(): irqs disabled around a
 * M32R_LOCK/M32R_UNLOCK sequence, with DCACHE_CLEAR as an errata
 * workaround (may scratch r4 -> __ATOMIC_CLOBBER).
 */
#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
/*
 * Instantiate atomic_add()/atomic_add_return() and
 * atomic_sub()/atomic_sub_return(), then drop the generator macros.
 */
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
112/**
113 * atomic_inc_return - increment atomic variable and return it
114 * @v: pointer of type atomic_t
115 *
116 * Atomically increments @v by 1 and returns the result.
117 */
118static __inline__ int atomic_inc_return(atomic_t *v)
119{
120 unsigned long flags;
121 int result;
122
123 local_irq_save(flags);
124 __asm__ __volatile__ (
125 "# atomic_inc_return \n\t"
126 DCACHE_CLEAR("%0", "r4", "%1")
127 M32R_LOCK" %0, @%1; \n\t"
128 "addi %0, #1; \n\t"
129 M32R_UNLOCK" %0, @%1; \n\t"
130 : "=&r" (result)
131 : "r" (&v->counter)
132 : "memory"
Peter Zijlstrac9ebe212014-03-23 19:02:22 +0100133 __ATOMIC_CLOBBER
Linus Torvalds1da177e2005-04-16 15:20:36 -0700134 );
135 local_irq_restore(flags);
136
137 return result;
138}
139
140/**
141 * atomic_dec_return - decrement atomic variable and return it
142 * @v: pointer of type atomic_t
143 *
144 * Atomically decrements @v by 1 and returns the result.
145 */
146static __inline__ int atomic_dec_return(atomic_t *v)
147{
148 unsigned long flags;
149 int result;
150
151 local_irq_save(flags);
152 __asm__ __volatile__ (
153 "# atomic_dec_return \n\t"
154 DCACHE_CLEAR("%0", "r4", "%1")
155 M32R_LOCK" %0, @%1; \n\t"
156 "addi %0, #-1; \n\t"
157 M32R_UNLOCK" %0, @%1; \n\t"
158 : "=&r" (result)
159 : "r" (&v->counter)
160 : "memory"
Peter Zijlstrac9ebe212014-03-23 19:02:22 +0100161 __ATOMIC_CLOBBER
Linus Torvalds1da177e2005-04-16 15:20:36 -0700162 );
163 local_irq_restore(flags);
164
165 return result;
166}
167
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
/* Both defer to the generic [cmp]xchg() helpers from <asm/cmpxchg.h>. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
218/**
Arun Sharmaf24219b2011-07-26 16:09:07 -0700219 * __atomic_add_unless - add unless the number is a given value
Hirokazu Takata0332db52005-11-28 13:43:59 -0800220 * @v: pointer of type atomic_t
221 * @a: the amount to add to v...
222 * @u: ...unless v is equal to u.
223 *
224 * Atomically adds @a to @v, so long as it was not @u.
Arun Sharmaf24219b2011-07-26 16:09:07 -0700225 * Returns the old value of @v.
Hirokazu Takata0332db52005-11-28 13:43:59 -0800226 */
Arun Sharmaf24219b2011-07-26 16:09:07 -0700227static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700228{
229 int c, old;
230 c = atomic_read(v);
231 for (;;) {
232 if (unlikely(c == (u)))
233 break;
234 old = atomic_cmpxchg((v), c, c + (a));
235 if (likely(old == c))
236 break;
237 c = old;
238 }
Arun Sharmaf24219b2011-07-26 16:09:07 -0700239 return c;
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700240}
241
Hirokazu Takata0332db52005-11-28 13:43:59 -0800242
Linus Torvalds1da177e2005-04-16 15:20:36 -0700243static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
244{
245 unsigned long flags;
246 unsigned long tmp;
247
248 local_irq_save(flags);
249 __asm__ __volatile__ (
250 "# atomic_clear_mask \n\t"
251 DCACHE_CLEAR("%0", "r5", "%1")
252 M32R_LOCK" %0, @%1; \n\t"
253 "and %0, %2; \n\t"
254 M32R_UNLOCK" %0, @%1; \n\t"
255 : "=&r" (tmp)
256 : "r" (addr), "r" (~mask)
257 : "memory"
Peter Zijlstrac9ebe212014-03-23 19:02:22 +0100258 __ATOMIC_CLOBBER
Linus Torvalds1da177e2005-04-16 15:20:36 -0700259 );
260 local_irq_restore(flags);
261}
262
263static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
264{
265 unsigned long flags;
266 unsigned long tmp;
267
268 local_irq_save(flags);
269 __asm__ __volatile__ (
270 "# atomic_set_mask \n\t"
271 DCACHE_CLEAR("%0", "r5", "%1")
272 M32R_LOCK" %0, @%1; \n\t"
273 "or %0, %2; \n\t"
274 M32R_UNLOCK" %0, @%1; \n\t"
275 : "=&r" (tmp)
276 : "r" (addr), "r" (mask)
277 : "memory"
Peter Zijlstrac9ebe212014-03-23 19:02:22 +0100278 __ATOMIC_CLOBBER
Linus Torvalds1da177e2005-04-16 15:20:36 -0700279 );
280 local_irq_restore(flags);
281}
282
Linus Torvalds1da177e2005-04-16 15:20:36 -0700283#endif /* _ASM_M32R_ATOMIC_H */