/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic implementation of 64-bit atomics using spinlocks,
 * useful on processors that don't have 64-bit atomic instructions.
 *
 * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 */
#ifndef _ASM_GENERIC_ATOMIC64_H
#define _ASM_GENERIC_ATOMIC64_H
#include <linux/types.h>

typedef struct {
	s64 counter;
} atomic64_t;

#define ATOMIC64_INIT(i) { (i) }
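
/*
 * Usage sketch (illustrative only; "packets" is a made-up name):
 *
 *	static atomic64_t packets = ATOMIC64_INIT(0);
 */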

/* Out-of-line implementations are provided by lib/atomic64.c. */
extern s64 generic_atomic64_read(const atomic64_t *v);
extern void generic_atomic64_set(atomic64_t *v, s64 i);
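
/*
 * A minimal sketch of the spinlock-based fallback described at the top
 * of this file, modelled on lib/atomic64.c (NR_LOCKS, atomic64_lock and
 * lock_addr() are that file's internal names, shown here purely for
 * illustration): each atomic64_t hashes to one lock in a small pool,
 * and every operation runs under that lock with interrupts disabled:
 *
 *	static union {
 *		raw_spinlock_t lock;
 *		char pad[L1_CACHE_BYTES];
 *	} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp;
 *
 *	static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
 *	{
 *		unsigned long addr = (unsigned long) v;
 *
 *		addr >>= L1_CACHE_SHIFT;
 *		addr ^= (addr >> 8) ^ (addr >> 16);
 *		return &atomic64_lock[addr & (NR_LOCKS - 1)];
 *	}
 *
 *	s64 generic_atomic64_read(const atomic64_t *v)
 *	{
 *		unsigned long flags;
 *		raw_spinlock_t *lock = lock_addr(v);
 *		s64 val;
 *
 *		raw_spin_lock_irqsave(lock, flags);
 *		val = v->counter;
 *		raw_spin_unlock_irqrestore(lock, flags);
 *		return val;
 *	}
 */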

/*
 * Each operation comes in three flavours: plain (no return value),
 * *_return (returns the new value) and fetch_* (returns the old value).
 */
#define ATOMIC64_OP(op) \
extern void generic_atomic64_##op(s64 a, atomic64_t *v);

#define ATOMIC64_OP_RETURN(op) \
extern s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v);

#define ATOMIC64_FETCH_OP(op) \
extern s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v);

#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)
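
/*
 * For example, ATOMIC64_OPS(add) above expands to the declarations:
 *
 *	extern void generic_atomic64_add(s64 a, atomic64_t *v);
 *	extern s64 generic_atomic64_add_return(s64 a, atomic64_t *v);
 *	extern s64 generic_atomic64_fetch_add(s64 a, atomic64_t *v);
 */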

/*
 * The bitwise operations have no *_return form in the atomic64 API,
 * so redefine ATOMIC64_OPS without ATOMIC64_OP_RETURN for them.
 */
#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

extern s64 generic_atomic64_dec_if_positive(atomic64_t *v);
extern s64 generic_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n);
extern s64 generic_atomic64_xchg(atomic64_t *v, s64 new);
extern s64 generic_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u);
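
/*
 * Sketch of the cmpxchg fallback (illustrative; modelled on
 * lib/atomic64.c and reusing the lock_addr() helper from the sketch
 * above): the compare and the conditional store happen under the same
 * hashed lock, which is what makes the pair atomic:
 *
 *	s64 generic_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
 *	{
 *		unsigned long flags;
 *		raw_spinlock_t *lock = lock_addr(v);
 *		s64 val;
 *
 *		raw_spin_lock_irqsave(lock, flags);
 *		val = v->counter;
 *		if (val == o)
 *			v->counter = n;
 *		raw_spin_unlock_irqrestore(lock, flags);
 *		return val;
 *	}
 */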

/*
 * Wire the arch_atomic64_*() names expected by the generic atomic
 * machinery to the spinlock-based implementations above.
 */
#define arch_atomic64_read generic_atomic64_read
#define arch_atomic64_set generic_atomic64_set
#define arch_atomic64_set_release generic_atomic64_set

#define arch_atomic64_add generic_atomic64_add
#define arch_atomic64_add_return generic_atomic64_add_return
#define arch_atomic64_fetch_add generic_atomic64_fetch_add
#define arch_atomic64_sub generic_atomic64_sub
#define arch_atomic64_sub_return generic_atomic64_sub_return
#define arch_atomic64_fetch_sub generic_atomic64_fetch_sub

#define arch_atomic64_and generic_atomic64_and
#define arch_atomic64_fetch_and generic_atomic64_fetch_and
#define arch_atomic64_or generic_atomic64_or
#define arch_atomic64_fetch_or generic_atomic64_fetch_or
#define arch_atomic64_xor generic_atomic64_xor
#define arch_atomic64_fetch_xor generic_atomic64_fetch_xor

#define arch_atomic64_dec_if_positive generic_atomic64_dec_if_positive
#define arch_atomic64_cmpxchg generic_atomic64_cmpxchg
#define arch_atomic64_xchg generic_atomic64_xchg
#define arch_atomic64_fetch_add_unless generic_atomic64_fetch_add_unless
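
/*
 * The common code in include/linux/atomic.h then layers the public
 * atomic64_*() API on top of these arch_atomic64_*() names, so callers
 * never see the fallback, e.g. (illustrative; "refs" is a made-up name):
 *
 *	static atomic64_t refs = ATOMIC64_INIT(1);
 *
 *	atomic64_add(2, &refs);
 *	if (atomic64_dec_if_positive(&refs) < 0)
 *		;	// counter was already <= 0, nothing was written
 */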

#endif /* _ASM_GENERIC_ATOMIC64_H */