/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
 */
#ifndef _ASM_POWERPC_HW_IRQ_H
#define _ASM_POWERPC_HW_IRQ_H

#ifdef __KERNEL__

#include <linux/errno.h>
#include <linux/compiler.h>
#include <asm/ptrace.h>
#include <asm/processor.h>

#ifdef CONFIG_PPC64

/*
 * PACA flags in paca->irq_happened.
 *
 * These bits are set when interrupts occur while soft-disabled
 * and allow a proper replay. Additionally, PACA_IRQ_HARD_DIS
 * is set whenever we manually hard disable.
 */
#define PACA_IRQ_HARD_DIS	0x01
#define PACA_IRQ_DBELL		0x02
#define PACA_IRQ_EE		0x04
#define PACA_IRQ_DEC		0x08 /* Or FIT */
#define PACA_IRQ_EE_EDGE	0x10 /* BookE only */
#define PACA_IRQ_HMI		0x20
#define PACA_IRQ_PMI		0x40

/*
 * flags for paca->irq_soft_mask
 */
#define IRQS_ENABLED		0
#define IRQS_DISABLED		1 /* local_irq_disable() interrupts */
#define IRQS_PMI_DISABLED	2
#define IRQS_ALL_DISABLED	(IRQS_DISABLED | IRQS_PMI_DISABLED)
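
/*
 * Editor's note (illustrative, not from the original source): with the
 * values above the soft-mask levels nest as
 *
 *	IRQS_ENABLED				0x0	nothing soft-masked
 *	IRQS_DISABLED				0x1	local_irq_disable() level
 *	IRQS_DISABLED | IRQS_PMI_DISABLED	0x3	== IRQS_ALL_DISABLED
 *
 * A PMI-only mask (0x2 on its own) is not a supported state; the debug
 * check in irq_soft_mask_set() below warns if the standard bit is missing.
 */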

#endif /* CONFIG_PPC64 */

#ifndef __ASSEMBLY__

extern void replay_system_reset(void);
extern void __replay_interrupt(unsigned int vector);

extern void timer_interrupt(struct pt_regs *);
extern void performance_monitor_exception(struct pt_regs *regs);
extern void WatchdogException(struct pt_regs *regs);
extern void unknown_exception(struct pt_regs *regs);

#ifdef CONFIG_PPC64
#include <asm/paca.h>

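/*
 * Editor's note: on 64-bit kernels GPR13 holds the per-CPU paca pointer,
 * so the accessors below read and write paca->irq_soft_mask with a single
 * lbz/stb at an immediate offset from r13.
 */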
static inline notrace unsigned long irq_soft_mask_return(void)
{
	unsigned long flags;

	asm volatile(
		"lbz %0,%1(13)"
		: "=r" (flags)
		: "i" (offsetof(struct paca_struct, irq_soft_mask)));

	return flags;
}

/*
 * The "memory" clobber acts as both a compiler barrier
 * for the critical section and as a clobber because
 * we changed paca->irq_soft_mask
 */
static inline notrace void irq_soft_mask_set(unsigned long mask)
{
#ifdef CONFIG_PPC_IRQ_SOFT_MASK_DEBUG
	/*
	 * The irq mask must always include the STD bit if any are set,
	 * and interrupts don't get replayed until the standard
	 * interrupt (local_irq_disable()) is unmasked.
	 *
	 * Other masks must only provide additional masking beyond
	 * the standard, and they are also not replayed until the
	 * standard interrupt becomes unmasked.
	 *
	 * This could be changed, but it would require partial
	 * unmasks to be replayed, among other things. For now, take
	 * the simple approach.
	 */
	WARN_ON(mask && !(mask & IRQS_DISABLED));
#endif

	asm volatile(
		"stb %0,%1(13)"
		:
		: "r" (mask),
		  "i" (offsetof(struct paca_struct, irq_soft_mask))
		: "memory");
}

static inline notrace unsigned long irq_soft_mask_set_return(unsigned long mask)
{
	unsigned long flags;

#ifdef CONFIG_PPC_IRQ_SOFT_MASK_DEBUG
	WARN_ON(mask && !(mask & IRQS_DISABLED));
#endif

	asm volatile(
		"lbz %0,%1(13); stb %2,%1(13)"
		: "=&r" (flags)
		: "i" (offsetof(struct paca_struct, irq_soft_mask)),
		  "r" (mask)
		: "memory");

	return flags;
}

static inline notrace unsigned long irq_soft_mask_or_return(unsigned long mask)
{
	unsigned long flags, tmp;

	asm volatile(
		"lbz %0,%2(13); or %1,%0,%3; stb %1,%2(13)"
		: "=&r" (flags), "=r" (tmp)
		: "i" (offsetof(struct paca_struct, irq_soft_mask)),
		  "r" (mask)
		: "memory");

#ifdef CONFIG_PPC_IRQ_SOFT_MASK_DEBUG
	WARN_ON((mask | flags) && !((mask | flags) & IRQS_DISABLED));
#endif

	return flags;
}

static inline unsigned long arch_local_save_flags(void)
{
	return irq_soft_mask_return();
}

static inline void arch_local_irq_disable(void)
{
	irq_soft_mask_set(IRQS_DISABLED);
}

extern void arch_local_irq_restore(unsigned long);

static inline void arch_local_irq_enable(void)
{
	arch_local_irq_restore(IRQS_ENABLED);
}

static inline unsigned long arch_local_irq_save(void)
{
	return irq_soft_mask_set_return(IRQS_DISABLED);
}

static inline bool arch_irqs_disabled_flags(unsigned long flags)
{
	return flags & IRQS_DISABLED;
}

static inline bool arch_irqs_disabled(void)
{
	return arch_irqs_disabled_flags(arch_local_save_flags());
}
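
/*
 * Illustrative sketch (editor's note, not from the original source): the
 * generic local_irq_save()/local_irq_restore() in include/linux/irqflags.h
 * resolve to the arch_* helpers above, so on 64-bit the usual pattern
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);		// sets the soft mask to IRQS_DISABLED
 *	...critical section...		// interrupts arriving now are only
 *					// recorded in paca->irq_happened
 *	local_irq_restore(flags);	// replays anything left pending
 *
 * only manipulates the soft mask; hard (MSR[EE]) disabling is a separate
 * operation, see hard_irq_disable() below.
 */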

#ifdef CONFIG_PPC_BOOK3S
/*
 * To support disabling and enabling of irqs together with PMIs, a set of
 * new powerpc_local_irq_pmu_save() and powerpc_local_irq_pmu_restore()
 * macros is added. They are implemented following the generic
 * local_irq_* code in include/linux/irqflags.h; a usage sketch follows
 * the definitions below.
 */
#define raw_local_irq_pmu_save(flags)					\
	do {								\
		typecheck(unsigned long, flags);			\
		flags = irq_soft_mask_or_return(IRQS_DISABLED |		\
				IRQS_PMI_DISABLED);			\
	} while(0)

#define raw_local_irq_pmu_restore(flags)				\
	do {								\
		typecheck(unsigned long, flags);			\
		arch_local_irq_restore(flags);				\
	} while(0)

#ifdef CONFIG_TRACE_IRQFLAGS
#define powerpc_local_irq_pmu_save(flags)			\
	do {							\
		raw_local_irq_pmu_save(flags);			\
		trace_hardirqs_off();				\
	} while(0)
#define powerpc_local_irq_pmu_restore(flags)			\
	do {							\
		if (raw_irqs_disabled_flags(flags)) {		\
			raw_local_irq_pmu_restore(flags);	\
			trace_hardirqs_off();			\
		} else {					\
			trace_hardirqs_on();			\
			raw_local_irq_pmu_restore(flags);	\
		}						\
	} while(0)
#else
#define powerpc_local_irq_pmu_save(flags)			\
	do {							\
		raw_local_irq_pmu_save(flags);			\
	} while(0)
#define powerpc_local_irq_pmu_restore(flags)			\
	do {							\
		raw_local_irq_pmu_restore(flags);		\
	} while(0)
#endif /* CONFIG_TRACE_IRQFLAGS */

#endif /* CONFIG_PPC_BOOK3S */
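
/*
 * Illustrative usage sketch (editor's note, not from the original source):
 * code that must keep even performance monitor interrupts (PMIs) out of a
 * critical section, e.g. a Book3S perf event update, would do roughly
 *
 *	unsigned long flags;
 *
 *	powerpc_local_irq_pmu_save(flags);	// masks IRQS_DISABLED | IRQS_PMI_DISABLED
 *	...update PMU state...
 *	powerpc_local_irq_pmu_restore(flags);	// replays whatever was soft-masked
 *
 * whereas a plain local_irq_save() leaves the PMI level unmasked.
 */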

#ifdef CONFIG_PPC_BOOK3E
#define __hard_irq_enable()	asm volatile("wrteei 1" : : : "memory")
#define __hard_irq_disable()	asm volatile("wrteei 0" : : : "memory")
#else
#define __hard_irq_enable()	__mtmsrd(local_paca->kernel_msr | MSR_EE, 1)
#define __hard_irq_disable()	__mtmsrd(local_paca->kernel_msr, 1)
#endif
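
/*
 * Editor's note: the __hard_irq_* helpers above change actual interrupt
 * delivery via MSR[EE] (wrteei on Book3E, mtmsrd with L=1 elsewhere), as
 * opposed to the lazy soft mask kept in paca->irq_soft_mask.
 */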

#define hard_irq_disable()	do {					\
	unsigned long flags;						\
	__hard_irq_disable();						\
	flags = irq_soft_mask_set_return(IRQS_ALL_DISABLED);		\
	local_paca->irq_happened |= PACA_IRQ_HARD_DIS;			\
	if (!arch_irqs_disabled_flags(flags))				\
		trace_hardirqs_off();					\
} while(0)

static inline bool lazy_irq_pending(void)
{
	return !!(get_paca()->irq_happened & ~PACA_IRQ_HARD_DIS);
}

/*
 * This is called by asynchronous interrupts to conditionally re-enable
 * hard interrupts, when running soft-disabled, after the source of the
 * interrupt has been cleared.
 */
static inline void may_hard_irq_enable(void)
{
	get_paca()->irq_happened &= ~PACA_IRQ_HARD_DIS;
	if (!(get_paca()->irq_happened & PACA_IRQ_EE))
		__hard_irq_enable();
}
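
/*
 * Illustrative sketch (editor's note, not from the original source): an
 * asynchronous handler such as the timer interrupt enters hard-disabled;
 * once its own source is silenced it can do
 *
 *	...clear/rearm the interrupt source...
 *	may_hard_irq_enable();		// turn MSR[EE] back on unless an
 *					// external interrupt awaits replay
 *	...do the bulk of the work...
 *
 * keeping the hard-disabled window short.
 */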

static inline bool arch_irq_disabled_regs(struct pt_regs *regs)
{
	return (regs->softe & IRQS_DISABLED);
}

extern bool prep_irq_for_idle(void);
extern bool prep_irq_for_idle_irqsoff(void);
extern void irq_set_pending_from_srr1(unsigned long srr1);

#define fini_irq_for_idle_irqsoff() trace_hardirqs_off();

extern void force_external_irq_replay(void);

#else /* CONFIG_PPC64 */

#define SET_MSR_EE(x)	mtmsr(x)

static inline unsigned long arch_local_save_flags(void)
{
	return mfmsr();
}

static inline void arch_local_irq_restore(unsigned long flags)
{
#if defined(CONFIG_BOOKE)
	asm volatile("wrtee %0" : : "r" (flags) : "memory");
#else
	mtmsr(flags);
#endif
}

static inline unsigned long arch_local_irq_save(void)
{
	unsigned long flags = arch_local_save_flags();
#ifdef CONFIG_BOOKE
	asm volatile("wrteei 0" : : : "memory");
#elif defined(CONFIG_PPC_8xx)
	wrtspr(SPRN_EID);
#else
	SET_MSR_EE(flags & ~MSR_EE);
#endif
	return flags;
}

static inline void arch_local_irq_disable(void)
{
#ifdef CONFIG_BOOKE
	asm volatile("wrteei 0" : : : "memory");
#elif defined(CONFIG_PPC_8xx)
	wrtspr(SPRN_EID);
#else
	arch_local_irq_save();
#endif
}

static inline void arch_local_irq_enable(void)
{
#ifdef CONFIG_BOOKE
	asm volatile("wrteei 1" : : : "memory");
#elif defined(CONFIG_PPC_8xx)
	wrtspr(SPRN_EIE);
#else
	unsigned long msr = mfmsr();
	SET_MSR_EE(msr | MSR_EE);
#endif
}

static inline bool arch_irqs_disabled_flags(unsigned long flags)
{
	return (flags & MSR_EE) == 0;
}

static inline bool arch_irqs_disabled(void)
{
	return arch_irqs_disabled_flags(arch_local_save_flags());
}

#define hard_irq_disable()	arch_local_irq_disable()

static inline bool arch_irq_disabled_regs(struct pt_regs *regs)
{
	return !(regs->msr & MSR_EE);
}

static inline void may_hard_irq_enable(void) { }

#endif /* CONFIG_PPC64 */

#define ARCH_IRQ_INIT_FLAGS	IRQ_NOREQUEST

/*
 * interrupt-retrigger: should we handle this via lost interrupts and IPIs
 * or should we not care like we do now? --BenH.
 */
struct irq_chip;

#endif	/* __ASSEMBLY__ */
#endif	/* __KERNEL__ */
#endif	/* _ASM_POWERPC_HW_IRQ_H */