#!/bin/sh
# SPDX-License-Identifier: GPL-2.0

# Locate this script's directory so the shared helpers and fallback templates
# can be found relative to it. Quote "$0" so a checkout path containing
# whitespace does not word-split (SC2086).
ATOMICDIR=$(dirname "$0")

. "${ATOMICDIR}/atomic-tbl.sh"
#gen_template_fallback(template, meta, pfx, name, sfx, order, atomic, int, args...)
# Expand one fallback template for a single ordering variant. The expansion is
# wrapped in an #ifndef guard so an architecture-provided definition of the
# same op takes precedence; nothing is emitted when no template was found.
gen_template_fallback()
{
	local template="$1" meta="$2" pfx="$3" name="$4" sfx="$5" order="$6" atomic="$7" int="$8"
	shift 8

	# Full name of the op this variant defines, e.g. arch_atomic_fetch_add_acquire.
	local atomicname="arch_${atomic}_${pfx}${name}${sfx}${order}"

	# These locals are referenced by the sourced template below.
	local ret retstmt params args
	ret="$(gen_ret_type "${meta}" "${int}")"
	retstmt="$(gen_ret_stmt "${meta}")"
	params="$(gen_params "${int}" "${atomic}" "$@")"
	args="$(gen_args "$@")"

	if [ -n "${template}" ]; then
		printf '#ifndef %s\n' "${atomicname}"
		. "${template}"
		printf '#define %s %s\n' "${atomicname}" "${atomicname}"
		printf '#endif\n\n'
	fi
}
| 34 | |
#gen_proto_fallback(meta, pfx, name, sfx, order, atomic, int, args...)
# Look up the fallback template for this op/ordering (may be empty) and hand
# it, together with all original arguments, to gen_template_fallback().
gen_proto_fallback()
{
	local meta="$1" pfx="$2" name="$3" sfx="$4" order="$5"
	shift 5

	local tmpl
	tmpl="$(find_fallback_template "${pfx}" "${name}" "${sfx}" "${order}")"
	gen_template_fallback "${tmpl}" "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@"
}
| 47 | |
#gen_basic_fallbacks(basename)
# Define the _acquire/_release/_relaxed variants of ${basename} as plain
# aliases of the fully-ordered op (used when no _relaxed form exists).
gen_basic_fallbacks()
{
	local base="$1"; shift

	local suffix
	for suffix in _acquire _release _relaxed; do
		printf '#define %s%s %s\n' "${base}" "${suffix}" "${base}"
	done
}
| 58 | |
#gen_proto_order_variant(meta, pfx, name, sfx, order, atomic, ...)
# Emit the self-alias #define for one fully-named ordering variant, marking
# the op as provided. 'meta' is accepted for signature parity but unused.
gen_proto_order_variant()
{
	local meta="$1" pfx="$2" name="$3" sfx="$4" order="$5" atomic="$6"

	local sym="arch_${atomic}_${pfx}${name}${sfx}"

	printf '#define %s%s %s%s\n' "${sym}" "${order}" "${sym}" "${order}"
}
| 72 | |
#gen_proto_order_variants(meta, pfx, name, sfx, atomic, int, args...)
# Emit all ordering variants of one op. If the op has no _relaxed form, only
# the base (plus _acquire/_release where the meta allows) is templated.
# Otherwise, emit either basic aliases / per-order template expansions (when
# the arch gave no _relaxed form) or the generic acquire/release/fence
# wrappers built on top of the arch's _relaxed form.
gen_proto_order_variants()
{
	local meta="$1"; shift
	local pfx="$1"; shift
	local name="$1"; shift
	local sfx="$1"; shift
	# Note: deliberately no shift — "$@" below still starts with ${atomic},
	# matching what gen_proto_fallback()/gen_template_fallback() expect.
	local atomic="$1"

	local basename="arch_${atomic}_${pfx}${name}${sfx}"

	# Look up the base (order-less) fallback template. This used to pass the
	# unset variable ${order}, silently picking up whatever value a caller
	# had leaked into the global scope; pass an empty order explicitly.
	local template="$(find_fallback_template "${pfx}" "${name}" "${sfx}" "")"

	# If we don't have relaxed atomics, then we don't bother with ordering fallbacks
	# read_acquire and set_release need to be templated, though
	if ! meta_has_relaxed "${meta}"; then
		gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@"

		if meta_has_acquire "${meta}"; then
			gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@"
		fi

		if meta_has_release "${meta}"; then
			gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@"
		fi

		return
	fi

	printf "#ifndef ${basename}_relaxed\n"

	if [ -n "${template}" ]; then
		printf "#ifdef ${basename}\n"
	fi

	gen_basic_fallbacks "${basename}"

	if [ -n "${template}" ]; then
		printf "#endif /* ${basename} */\n\n"
		gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@"
		gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@"
		gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@"
		gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_relaxed" "$@"
	fi

	printf "#else /* ${basename}_relaxed */\n\n"

	gen_template_fallback "${ATOMICDIR}/fallbacks/acquire" "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@"
	gen_template_fallback "${ATOMICDIR}/fallbacks/release" "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@"
	gen_template_fallback "${ATOMICDIR}/fallbacks/fence" "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@"

	printf "#endif /* ${basename}_relaxed */\n\n"
}
| 126 | |
#gen_order_fallbacks(xchg)
# Define the _acquire/_release/fully-ordered variants of ${xchg} in terms of
# its _relaxed form via the __atomic_op_* wrappers. Each definition is
# guarded so an architecture-provided one wins.
gen_order_fallbacks()
{
	local base="$1"; shift

	local op guarded

	printf '\n'
	for op in acquire release fence; do
		# The fully-ordered op carries no suffix; the others append _<op>.
		guarded="${base}"
		if [ "${op}" != "fence" ]; then
			guarded="${base}_${op}"
		fi
		printf '#ifndef %s\n' "${guarded}"
		printf '#define %s(...) \\\n' "${guarded}"
		printf '\t__atomic_op_%s(%s, __VA_ARGS__)\n' "${op}" "${base}"
		printf '#endif\n\n'
	done
}
| 150 | |
#gen_xchg_fallbacks(xchg)
# Emit the ordering fallbacks for one xchg-family op: basic aliases when the
# arch has no _relaxed form, otherwise the __atomic_op_* based wrappers.
gen_xchg_fallbacks()
{
	local xchg="$1"; shift

	printf '#ifndef %s_relaxed\n' "${xchg}"
	gen_basic_fallbacks "${xchg}"
	printf '#else /* %s_relaxed */\n' "${xchg}"
	gen_order_fallbacks "${xchg}"
	printf '#endif /* %s_relaxed */\n\n' "${xchg}"
}
| 164 | |
#gen_try_cmpxchg_fallback(order)
# Emit a guarded arch_try_cmpxchg${order}() macro implemented on top of
# arch_cmpxchg${order}(): on failure the observed value is written back
# through _oldp, and the macro evaluates to the success boolean.
gen_try_cmpxchg_fallback()
{
	local order="$1"; shift;

# Heredoc body is emitted verbatim (only ${order} expands).
cat <<EOF
#ifndef arch_try_cmpxchg${order}
#define arch_try_cmpxchg${order}(_ptr, _oldp, _new) \\
({ \\
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \\
	___r = arch_cmpxchg${order}((_ptr), ___o, (_new)); \\
	if (unlikely(___r != ___o)) \\
		*___op = ___r; \\
	likely(___r == ___o); \\
})
#endif /* arch_try_cmpxchg${order} */

EOF
}
| 183 | |
#gen_try_cmpxchg_fallbacks()
# Emit all arch_try_cmpxchg*() fallbacks: when no _relaxed form exists, alias
# the orderings to the base op and synthesize each variant from cmpxchg;
# otherwise build the ordered variants from arch_try_cmpxchg_relaxed().
gen_try_cmpxchg_fallbacks()
{
	# 'order' was previously a global here; after this function ran it
	# leaked "_relaxed" into the rest of the script, where code reading a
	# stray ${order} could silently pick it up. Keep it function-local.
	local order

	printf "#ifndef arch_try_cmpxchg_relaxed\n"
	printf "#ifdef arch_try_cmpxchg\n"

	gen_basic_fallbacks "arch_try_cmpxchg"

	printf "#endif /* arch_try_cmpxchg */\n\n"

	for order in "" "_acquire" "_release" "_relaxed"; do
		gen_try_cmpxchg_fallback "${order}"
	done

	printf "#else /* arch_try_cmpxchg_relaxed */\n"

	gen_order_fallbacks "arch_try_cmpxchg"

	printf "#endif /* arch_try_cmpxchg_relaxed */\n\n"
}
| 203 | |
# ---- Top-level driver: writes the generated header to stdout. ----
# Usage: $0 <atomics-table-file>  (redirect stdout to the target header).

# Header preamble: license, generated-file warning, include guard.
cat << EOF
// SPDX-License-Identifier: GPL-2.0

// Generated by $0
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

EOF

# Ordering fallbacks for the xchg/cmpxchg family of ops.
for xchg in "arch_xchg" "arch_cmpxchg" "arch_cmpxchg64"; do
	gen_xchg_fallbacks "${xchg}"
done

gen_try_cmpxchg_fallbacks

# "$1" is presumably the atomics table (one "name meta args..." entry per
# line; '^[a-z]' skips comments/blanks) — TODO confirm against callers.
# ${args} is intentionally unquoted so it word-splits into separate
# arguments. gen_proto comes from the sourced atomic-tbl.sh.
grep '^[a-z]' "$1" | while read name meta args; do
	gen_proto "${meta}" "${name}" "atomic" "int" ${args}
done

cat <<EOF
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

EOF

# Same table again, for the 64-bit atomic64_*()/s64 fallbacks.
grep '^[a-z]' "$1" | while read name meta args; do
	gen_proto "${meta}" "${name}" "atomic64" "s64" ${args}
done

# Close the include guard.
cat <<EOF
#endif /* _LINUX_ATOMIC_FALLBACK_H */
EOF