Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
David Howells | ae3a197 | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 2 | /* |
| 3 | * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu> |
| 4 | */ |
| 5 | #ifndef _ASM_POWERPC_SWITCH_TO_H |
| 6 | #define _ASM_POWERPC_SWITCH_TO_H |
| 7 | |
Nicholas Piggin | 68b3458 | 2020-02-26 03:35:34 +1000 | [diff] [blame] | 8 | #include <linux/sched.h> |
Anton Blanchard | 3eb5d58 | 2015-10-29 11:44:06 +1100 | [diff] [blame] | 9 | #include <asm/reg.h> |
| 10 | |
struct thread_struct;
struct task_struct;
struct pt_regs;

/*
 * Low-level context-switch entry point.  The return value is assigned to
 * "last" by the switch_to() macro below, i.e. the task we switched away
 * from, as seen by the newly-resumed thread.
 */
extern struct task_struct *__switch_to(struct task_struct *,
	struct task_struct *);
#define switch_to(prev, next, last)	((last) = __switch_to((prev), (next)))

/*
 * Assembly register-state switch between two thread_structs
 * (implemented in the arch entry/switch assembly).
 */
extern struct task_struct *_switch(struct thread_struct *prev,
		struct thread_struct *next);

/* Load a new set of BookE debug registers (struct debug_reg) onto the CPU. */
extern void switch_booke_debug_regs(struct debug_reg *new_debug);

/*
 * Emulate a faulting AltiVec instruction; returns an int status —
 * NOTE(review): error convention defined at the implementation, confirm there.
 */
extern int emulate_altivec(struct pt_regs *);
| 25 | |
#ifdef CONFIG_PPC_BOOK3S_64
/* Restore math (FP/vector) register state for a return to userspace. */
void restore_math(struct pt_regs *regs);
#else
/* Lazy math-state restore is only implemented on Book3S-64: no-op here. */
static inline void restore_math(struct pt_regs *regs)
{
}
#endif
Nicholas Piggin | 68b3458 | 2020-02-26 03:35:34 +1000 | [diff] [blame] | 33 | |
/* Restore transactional-memory state recorded in the thread, from regs. */
void restore_tm_state(struct pt_regs *regs);

/*
 * Flush every live register class (FP/VMX/VSX/SPE as configured) out of
 * the CPU into the task's thread_struct / force the CPU to give them up.
 */
extern void flush_all_to_thread(struct task_struct *);
extern void giveup_all(struct task_struct *);
| 38 | |
#ifdef CONFIG_PPC_FPU
/* Allow the kernel to use the FPU (sets MSR_FP for the current context). */
extern void enable_kernel_fp(void);
/* Save the task's live FP state back into its thread_struct. */
extern void flush_fp_to_thread(struct task_struct *);
extern void giveup_fpu(struct task_struct *);
extern void save_fpu(struct task_struct *);
/* End a kernel-FP section: clear MSR_FP again. */
static inline void disable_kernel_fp(void)
{
	msr_check_and_clear(MSR_FP);
}
#else
/* No FPU configured: the save/flush operations become no-op stubs. */
static inline void save_fpu(struct task_struct *t) { }
static inline void flush_fp_to_thread(struct task_struct *t) { }
#endif
| 52 | |
#ifdef CONFIG_ALTIVEC
/* Allow the kernel to use AltiVec/VMX (sets MSR_VEC for this context). */
extern void enable_kernel_altivec(void);
/* Save the task's live AltiVec state back into its thread_struct. */
extern void flush_altivec_to_thread(struct task_struct *);
extern void giveup_altivec(struct task_struct *);
extern void save_altivec(struct task_struct *);
/* End a kernel-AltiVec section: clear MSR_VEC again. */
static inline void disable_kernel_altivec(void)
{
	msr_check_and_clear(MSR_VEC);
}
#else
/* No AltiVec configured: save/giveup become no-op stubs. */
static inline void save_altivec(struct task_struct *t) { }
static inline void __giveup_altivec(struct task_struct *t) { }
#endif
| 66 | |
#ifdef CONFIG_VSX
/* Allow the kernel to use VSX; VSX implies FP and VMX availability. */
extern void enable_kernel_vsx(void);
/* Save the task's live VSX state back into its thread_struct. */
extern void flush_vsx_to_thread(struct task_struct *);
/* End a kernel-VSX section: VSX use covers FP and VEC, so clear all three. */
static inline void disable_kernel_vsx(void)
{
	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
}
#else
/*
 * CONFIG_VSX=n: any call site that is not compiled out by its own VSX
 * feature check is a bug, so fail the build rather than link a stub.
 */
static inline void enable_kernel_vsx(void)
{
	BUILD_BUG();
}

static inline void disable_kernel_vsx(void)
{
	BUILD_BUG();
}
#endif
| 85 | |
#ifdef CONFIG_SPE
/* Allow the kernel to use SPE (sets MSR_SPE for this context). */
extern void enable_kernel_spe(void);
/* Save the task's live SPE state back into its thread_struct. */
extern void flush_spe_to_thread(struct task_struct *);
extern void giveup_spe(struct task_struct *);
extern void __giveup_spe(struct task_struct *);
/* End a kernel-SPE section: clear MSR_SPE again. */
static inline void disable_kernel_spe(void)
{
	msr_check_and_clear(MSR_SPE);
}
#else
/* No SPE configured: giveup becomes a no-op stub. */
static inline void __giveup_spe(struct task_struct *t) { }
#endif
| 98 | |
/*
 * Zero all Event-Based Branch (EBB) and related perf register state in
 * @t's thread_struct.  Only Book3S-64 has these registers; elsewhere this
 * compiles to nothing.
 */
static inline void clear_task_ebb(struct task_struct *t)
{
#ifdef CONFIG_PPC_BOOK3S_64
    /* EBB perf events are not inherited, so clear all EBB state. */
    t->thread.ebbrr = 0;
    t->thread.ebbhr = 0;
    t->thread.bescr = 0;
    t->thread.mmcr2 = 0;
    t->thread.mmcr0 = 0;
    t->thread.siar = 0;
    t->thread.sdar = 0;
    t->thread.sier = 0;
    t->thread.used_ebb = 0;
#endif
}
| 114 | |
/*
 * Assign a thread-ID register (TIDR) value for @t.  Returns an int status —
 * NOTE(review): presumably 0 on success / negative errno; confirm at the
 * definition in process.c.
 */
extern int set_thread_tidr(struct task_struct *t);
David Howells | ae3a197 | 2012-03-28 18:30:02 +0100 | [diff] [blame] | 117 | #endif /* _ASM_POWERPC_SWITCH_TO_H */ |