/*
 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
 */
| 4 | #ifndef _ASM_POWERPC_SWITCH_TO_H |
| 5 | #define _ASM_POWERPC_SWITCH_TO_H |
| 6 | |
| 7 | struct thread_struct; |
| 8 | struct task_struct; |
| 9 | struct pt_regs; |
| 10 | |
| 11 | extern struct task_struct *__switch_to(struct task_struct *, |
| 12 | struct task_struct *); |
| 13 | #define switch_to(prev, next, last) ((last) = __switch_to((prev), (next))) |
| 14 | |
| 15 | struct thread_struct; |
| 16 | extern struct task_struct *_switch(struct thread_struct *prev, |
| 17 | struct thread_struct *next); |
#ifdef CONFIG_PPC_BOOK3S_64
/*
 * Capture per-thread SPRs into @prev's thread struct: TAR when the CPU
 * implements ISA 2.07, and DSCR when the CPU has one.
 * NOTE(review): the name suggests this runs early in the switch path,
 * before the main register save — confirm against the caller.
 */
static inline void save_early_sprs(struct thread_struct *prev)
{
	if (cpu_has_feature(CPU_FTR_ARCH_207S))
		prev->tar = mfspr(SPRN_TAR);
	if (cpu_has_feature(CPU_FTR_DSCR))
		prev->dscr = mfspr(SPRN_DSCR);
}
#else
/* Nothing to save outside Book3S-64. */
static inline void save_early_sprs(struct thread_struct *prev) {}
#endif

/* Facility enable/giveup helpers, implemented elsewhere in the arch code. */
extern void enable_kernel_fp(void);
extern void enable_kernel_altivec(void);
extern int emulate_altivec(struct pt_regs *);
extern void __giveup_vsx(struct task_struct *);
extern void giveup_vsx(struct task_struct *);
extern void enable_kernel_spe(void);
extern void giveup_spe(struct task_struct *);
extern void load_up_spe(struct task_struct *);
extern void switch_booke_debug_regs(struct debug_reg *new_debug);

#ifndef CONFIG_SMP
/* UP only: real implementation lives elsewhere.  On SMP it is a no-op. */
extern void discard_lazy_cpu_state(void);
#else
static inline void discard_lazy_cpu_state(void)
{
}
#endif

#ifdef CONFIG_PPC_FPU
extern void flush_fp_to_thread(struct task_struct *);
extern void giveup_fpu(struct task_struct *);
#else
/* No FPU configured: provide empty stubs so callers need no #ifdefs. */
static inline void flush_fp_to_thread(struct task_struct *t) { }
static inline void giveup_fpu(struct task_struct *t) { }
#endif

#ifdef CONFIG_ALTIVEC
extern void flush_altivec_to_thread(struct task_struct *);
extern void giveup_altivec(struct task_struct *);
extern void giveup_altivec_notask(void);
#else
/*
 * No Altivec: empty stubs so callers need no #ifdefs.
 * (giveup_altivec_notask deliberately has no stub — its callers are
 * themselves Altivec-only.)
 */
static inline void flush_altivec_to_thread(struct task_struct *t) { }
static inline void giveup_altivec(struct task_struct *t) { }
#endif

#ifdef CONFIG_VSX
extern void flush_vsx_to_thread(struct task_struct *);
#else
/* No VSX configured: empty stub. */
static inline void flush_vsx_to_thread(struct task_struct *t) { }
#endif

#ifdef CONFIG_SPE
extern void flush_spe_to_thread(struct task_struct *);
#else
/* No SPE configured: empty stub. */
static inline void flush_spe_to_thread(struct task_struct *t) { }
#endif

/*
 * Zero @t's Event-Based Branching and related PMU thread state
 * (Book3S-64 only; a no-op elsewhere).  EBB perf events are not
 * inherited, so clear all EBB state.
 */
static inline void clear_task_ebb(struct task_struct *t)
{
#ifdef CONFIG_PPC_BOOK3S_64
	t->thread.ebbrr = 0;
	t->thread.ebbhr = 0;
	t->thread.bescr = 0;
	t->thread.mmcr2 = 0;
	t->thread.mmcr0 = 0;
	t->thread.siar = 0;
	t->thread.sdar = 0;
	t->thread.sier = 0;
	t->thread.used_ebb = 0;
#endif
}

#endif /* _ASM_POWERPC_SWITCH_TO_H */