// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (2004) Linus Torvalds
 *
 * Author: Zwane Mwaikambo <zwane@fsmlabs.com>
 *
 * Copyright (2004, 2005) Ingo Molnar
 *
 * This file contains the spinlock/rwlock implementations for the
 * SMP and the DEBUG_SPINLOCK cases. (UP-nondebug inlines them)
 *
 * Note that some architectures have special knowledge about the
 * stack frames of these functions in their profile_pc. If you
 * change anything significant here that could change the stack
 * frame, contact the architecture maintainers.
 */

#include <linux/linkage.h>
#include <linux/preempt.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
#include <linux/debug_locks.h>
#include <linux/export.h>

#ifdef CONFIG_MMIOWB
#ifndef arch_mmiowb_state
DEFINE_PER_CPU(struct mmiowb_state, __mmiowb_state);
EXPORT_PER_CPU_SYMBOL(__mmiowb_state);
#endif
#endif
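
/*
 * Note: when CONFIG_MMIOWB is enabled, the per-CPU __mmiowb_state above
 * is the state consulted by the mmiowb_spin_lock()/mmiowb_spin_unlock()
 * helpers (see include/asm-generic/mmiowb.h) so that a pending mmiowb()
 * barrier is issued before a spinlock is released.
 */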

/*
 * If lockdep is enabled then we use the non-preemption spin-ops
 * even on CONFIG_PREEMPT, because lockdep assumes that interrupts are
 * not re-enabled during lock-acquire (which the preempt-spin-ops do):
 */
#if !defined(CONFIG_GENERIC_LOCKBREAK) || defined(CONFIG_DEBUG_LOCK_ALLOC)
/*
 * The __lock_function inlines are taken from
 * spinlock : include/linux/spinlock_api_smp.h
 * rwlock   : include/linux/rwlock_api_smp.h
 */
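
/*
 * For reference, the spinlock variant taken from there looks roughly
 * like this (paraphrased from include/linux/spinlock_api_smp.h; the
 * exact code may differ between kernel versions):
 *
 *	static inline void __raw_spin_lock(raw_spinlock_t *lock)
 *	{
 *		preempt_disable();
 *		spin_acquire(&lock->dep_map, 0, 0, _RET_IP_);
 *		LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
 *	}
 */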
#else

/*
 * Some architectures can relax in favour of the CPU owning the lock.
 */
#ifndef arch_read_relax
# define arch_read_relax(l)	cpu_relax()
#endif
#ifndef arch_write_relax
# define arch_write_relax(l)	cpu_relax()
#endif
#ifndef arch_spin_relax
# define arch_spin_relax(l)	cpu_relax()
#endif
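
/*
 * An architecture can override these (typically in its asm/spinlock.h)
 * before this file is compiled. Purely illustrative, not an in-tree
 * definition, an override could look like:
 *
 *	#define arch_spin_relax(l)	wfe_relax(l)
 *
 * where wfe_relax() is a hypothetical helper that waits for the lock
 * word to change instead of busy-spinning.
 */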

/*
 * We build the __lock_function inlines here. They are too large for
 * inlining all over the place, but here is only one user per function
 * which embeds them into the calling _lock_function below.
 *
 * This could be a long-held lock. We both prepare to spin for a long
 * time (making _this_ CPU preemptible if possible), and we also signal
 * towards that other CPU that it should break the lock ASAP.
 */
#define BUILD_LOCK_OPS(op, locktype)	\
void __lockfunc __raw_##op##_lock(locktype##_t *lock)	\
{	\
	for (;;) {	\
		preempt_disable();	\
		if (likely(do_raw_##op##_trylock(lock)))	\
			break;	\
		preempt_enable();	\
	\
		arch_##op##_relax(&lock->raw_lock);	\
	}	\
}	\
	\
unsigned long __lockfunc __raw_##op##_lock_irqsave(locktype##_t *lock)	\
{	\
	unsigned long flags;	\
	\
	for (;;) {	\
		preempt_disable();	\
		local_irq_save(flags);	\
		if (likely(do_raw_##op##_trylock(lock)))	\
			break;	\
		local_irq_restore(flags);	\
		preempt_enable();	\
	\
		arch_##op##_relax(&lock->raw_lock);	\
	}	\
	\
	return flags;	\
}	\
	\
void __lockfunc __raw_##op##_lock_irq(locktype##_t *lock)	\
{	\
	_raw_##op##_lock_irqsave(lock);	\
}	\
	\
void __lockfunc __raw_##op##_lock_bh(locktype##_t *lock)	\
{	\
	unsigned long flags;	\
	\
	/*							*/	\
	/* Careful: we must exclude softirqs too, hence the	*/	\
	/* irq-disabling. We use the generic preemption-aware	*/	\
	/* function:						*/	\
	/**/	\
	flags = _raw_##op##_lock_irqsave(lock);	\
	local_bh_disable();	\
	local_irq_restore(flags);	\
}	\

/*
 * Build preemption-friendly versions of the following
 * lock-spinning functions:
 *
 *         __[spin|read|write]_lock()
 *         __[spin|read|write]_lock_irq()
 *         __[spin|read|write]_lock_irqsave()
 *         __[spin|read|write]_lock_bh()
 */
BUILD_LOCK_OPS(spin, raw_spinlock);

#ifndef CONFIG_PREEMPT_RT
BUILD_LOCK_OPS(read, rwlock);
BUILD_LOCK_OPS(write, rwlock);
#endif

#endif

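/*
 * Caller-side usage sketch (illustrative only, not part of this file;
 * foo_lock is a made-up example lock):
 *
 *	static DEFINE_SPINLOCK(foo_lock);
 *	unsigned long flags;
 *
 *	spin_lock_irqsave(&foo_lock, flags);
 *	... critical section, local interrupts disabled ...
 *	spin_unlock_irqrestore(&foo_lock, flags);
 *
 * The spin_*() wrappers in <linux/spinlock.h> map onto the _raw_spin_*()
 * entry points below whenever the corresponding CONFIG_INLINE_* option
 * is not set.
 */
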
#ifndef CONFIG_INLINE_SPIN_TRYLOCK
int __lockfunc _raw_spin_trylock(raw_spinlock_t *lock)
{
	return __raw_spin_trylock(lock);
}
EXPORT_SYMBOL(_raw_spin_trylock);
#endif

#ifndef CONFIG_INLINE_SPIN_TRYLOCK_BH
int __lockfunc _raw_spin_trylock_bh(raw_spinlock_t *lock)
{
	return __raw_spin_trylock_bh(lock);
}
EXPORT_SYMBOL(_raw_spin_trylock_bh);
#endif

#ifndef CONFIG_INLINE_SPIN_LOCK
void __lockfunc _raw_spin_lock(raw_spinlock_t *lock)
{
	__raw_spin_lock(lock);
}
EXPORT_SYMBOL(_raw_spin_lock);
#endif

#ifndef CONFIG_INLINE_SPIN_LOCK_IRQSAVE
unsigned long __lockfunc _raw_spin_lock_irqsave(raw_spinlock_t *lock)
{
	return __raw_spin_lock_irqsave(lock);
}
EXPORT_SYMBOL(_raw_spin_lock_irqsave);
#endif

#ifndef CONFIG_INLINE_SPIN_LOCK_IRQ
void __lockfunc _raw_spin_lock_irq(raw_spinlock_t *lock)
{
	__raw_spin_lock_irq(lock);
}
EXPORT_SYMBOL(_raw_spin_lock_irq);
#endif

#ifndef CONFIG_INLINE_SPIN_LOCK_BH
void __lockfunc _raw_spin_lock_bh(raw_spinlock_t *lock)
{
	__raw_spin_lock_bh(lock);
}
EXPORT_SYMBOL(_raw_spin_lock_bh);
#endif

#ifdef CONFIG_UNINLINE_SPIN_UNLOCK
void __lockfunc _raw_spin_unlock(raw_spinlock_t *lock)
{
	__raw_spin_unlock(lock);
}
EXPORT_SYMBOL(_raw_spin_unlock);
#endif

#ifndef CONFIG_INLINE_SPIN_UNLOCK_IRQRESTORE
void __lockfunc _raw_spin_unlock_irqrestore(raw_spinlock_t *lock, unsigned long flags)
{
	__raw_spin_unlock_irqrestore(lock, flags);
}
EXPORT_SYMBOL(_raw_spin_unlock_irqrestore);
#endif

#ifndef CONFIG_INLINE_SPIN_UNLOCK_IRQ
void __lockfunc _raw_spin_unlock_irq(raw_spinlock_t *lock)
{
	__raw_spin_unlock_irq(lock);
}
EXPORT_SYMBOL(_raw_spin_unlock_irq);
#endif

#ifndef CONFIG_INLINE_SPIN_UNLOCK_BH
void __lockfunc _raw_spin_unlock_bh(raw_spinlock_t *lock)
{
	__raw_spin_unlock_bh(lock);
}
EXPORT_SYMBOL(_raw_spin_unlock_bh);
#endif

#ifndef CONFIG_PREEMPT_RT

#ifndef CONFIG_INLINE_READ_TRYLOCK
int __lockfunc _raw_read_trylock(rwlock_t *lock)
{
	return __raw_read_trylock(lock);
}
EXPORT_SYMBOL(_raw_read_trylock);
#endif

#ifndef CONFIG_INLINE_READ_LOCK
void __lockfunc _raw_read_lock(rwlock_t *lock)
{
	__raw_read_lock(lock);
}
EXPORT_SYMBOL(_raw_read_lock);
#endif

#ifndef CONFIG_INLINE_READ_LOCK_IRQSAVE
unsigned long __lockfunc _raw_read_lock_irqsave(rwlock_t *lock)
{
	return __raw_read_lock_irqsave(lock);
}
EXPORT_SYMBOL(_raw_read_lock_irqsave);
#endif

#ifndef CONFIG_INLINE_READ_LOCK_IRQ
void __lockfunc _raw_read_lock_irq(rwlock_t *lock)
{
	__raw_read_lock_irq(lock);
}
EXPORT_SYMBOL(_raw_read_lock_irq);
#endif

#ifndef CONFIG_INLINE_READ_LOCK_BH
void __lockfunc _raw_read_lock_bh(rwlock_t *lock)
{
	__raw_read_lock_bh(lock);
}
EXPORT_SYMBOL(_raw_read_lock_bh);
#endif

#ifndef CONFIG_INLINE_READ_UNLOCK
void __lockfunc _raw_read_unlock(rwlock_t *lock)
{
	__raw_read_unlock(lock);
}
EXPORT_SYMBOL(_raw_read_unlock);
#endif

#ifndef CONFIG_INLINE_READ_UNLOCK_IRQRESTORE
void __lockfunc _raw_read_unlock_irqrestore(rwlock_t *lock, unsigned long flags)
{
	__raw_read_unlock_irqrestore(lock, flags);
}
EXPORT_SYMBOL(_raw_read_unlock_irqrestore);
#endif

#ifndef CONFIG_INLINE_READ_UNLOCK_IRQ
void __lockfunc _raw_read_unlock_irq(rwlock_t *lock)
{
	__raw_read_unlock_irq(lock);
}
EXPORT_SYMBOL(_raw_read_unlock_irq);
#endif

#ifndef CONFIG_INLINE_READ_UNLOCK_BH
void __lockfunc _raw_read_unlock_bh(rwlock_t *lock)
{
	__raw_read_unlock_bh(lock);
}
EXPORT_SYMBOL(_raw_read_unlock_bh);
#endif

#ifndef CONFIG_INLINE_WRITE_TRYLOCK
int __lockfunc _raw_write_trylock(rwlock_t *lock)
{
	return __raw_write_trylock(lock);
}
EXPORT_SYMBOL(_raw_write_trylock);
#endif

#ifndef CONFIG_INLINE_WRITE_LOCK
void __lockfunc _raw_write_lock(rwlock_t *lock)
{
	__raw_write_lock(lock);
}
EXPORT_SYMBOL(_raw_write_lock);
#endif

#ifndef CONFIG_INLINE_WRITE_LOCK_IRQSAVE
unsigned long __lockfunc _raw_write_lock_irqsave(rwlock_t *lock)
{
	return __raw_write_lock_irqsave(lock);
}
EXPORT_SYMBOL(_raw_write_lock_irqsave);
#endif

#ifndef CONFIG_INLINE_WRITE_LOCK_IRQ
void __lockfunc _raw_write_lock_irq(rwlock_t *lock)
{
	__raw_write_lock_irq(lock);
}
EXPORT_SYMBOL(_raw_write_lock_irq);
#endif

#ifndef CONFIG_INLINE_WRITE_LOCK_BH
void __lockfunc _raw_write_lock_bh(rwlock_t *lock)
{
	__raw_write_lock_bh(lock);
}
EXPORT_SYMBOL(_raw_write_lock_bh);
#endif

#ifndef CONFIG_INLINE_WRITE_UNLOCK
void __lockfunc _raw_write_unlock(rwlock_t *lock)
{
	__raw_write_unlock(lock);
}
EXPORT_SYMBOL(_raw_write_unlock);
#endif

#ifndef CONFIG_INLINE_WRITE_UNLOCK_IRQRESTORE
void __lockfunc _raw_write_unlock_irqrestore(rwlock_t *lock, unsigned long flags)
{
	__raw_write_unlock_irqrestore(lock, flags);
}
EXPORT_SYMBOL(_raw_write_unlock_irqrestore);
#endif

#ifndef CONFIG_INLINE_WRITE_UNLOCK_IRQ
void __lockfunc _raw_write_unlock_irq(rwlock_t *lock)
{
	__raw_write_unlock_irq(lock);
}
EXPORT_SYMBOL(_raw_write_unlock_irq);
#endif

#ifndef CONFIG_INLINE_WRITE_UNLOCK_BH
void __lockfunc _raw_write_unlock_bh(rwlock_t *lock)
{
	__raw_write_unlock_bh(lock);
}
EXPORT_SYMBOL(_raw_write_unlock_bh);
#endif

#endif /* !CONFIG_PREEMPT_RT */

#ifdef CONFIG_DEBUG_LOCK_ALLOC

void __lockfunc _raw_spin_lock_nested(raw_spinlock_t *lock, int subclass)
{
	preempt_disable();
	spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_);
	LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
}
EXPORT_SYMBOL(_raw_spin_lock_nested);
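
/*
 * Illustrative use of the nested variant (not part of this file): when
 * two locks of the same lock class must legitimately be held at once,
 * the inner acquisition tells lockdep about the nesting, e.g.
 *
 *	spin_lock(&parent->lock);
 *	spin_lock_nested(&child->lock, SINGLE_DEPTH_NESTING);
 */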

unsigned long __lockfunc _raw_spin_lock_irqsave_nested(raw_spinlock_t *lock,
							int subclass)
{
	unsigned long flags;

	local_irq_save(flags);
	preempt_disable();
	spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_);
	LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
	return flags;
}
EXPORT_SYMBOL(_raw_spin_lock_irqsave_nested);

void __lockfunc _raw_spin_lock_nest_lock(raw_spinlock_t *lock,
					 struct lockdep_map *nest_lock)
{
	preempt_disable();
	spin_acquire_nest(&lock->dep_map, 0, 0, nest_lock, _RET_IP_);
	LOCK_CONTENDED(lock, do_raw_spin_trylock, do_raw_spin_lock);
}
EXPORT_SYMBOL(_raw_spin_lock_nest_lock);

#endif

notrace int in_lock_functions(unsigned long addr)
{
	/* Linker adds these: start and end of __lockfunc functions */
	extern char __lock_text_start[], __lock_text_end[];

	return addr >= (unsigned long)__lock_text_start
	&& addr < (unsigned long)__lock_text_end;
}
EXPORT_SYMBOL(in_lock_functions);
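
/*
 * Example use (illustrative): architecture profile_pc() implementations
 * typically call this so that profiling hits are not attributed to the
 * lock helpers themselves, along the lines of
 *
 *	if (!user_mode(regs) && in_lock_functions(instruction_pointer(regs)))
 *		pc = ...the caller's return address recovered from the stack...;
 */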