/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * Exception entry code. This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */

/*
 * Common exception prologue. Stashes r10/r11 in the SPRG scratch
 * registers and CR in r10 so they survive the frame setup, then runs
 * the two prologue halves below.
 */
.macro EXCEPTION_PROLOG
	mtspr	SPRN_SPRG_SCRATCH0,r10	/* free r10 as scratch */
	mtspr	SPRN_SPRG_SCRATCH1,r11	/* free r11 as scratch */
	mfcr	r10			/* capture CR before the prologue clobbers it */
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2
.endm

/*
 * Prologue, first half: leave the physical address of a freshly
 * allocated exception frame in r11.  A trap from kernel mode reuses
 * the current stack (r1); a trap from user mode (SRR1 has MSR_PR set)
 * switches to the top of the task's kernel stack, found through
 * SPRN_SPRG_THREAD.  On exit cr0.eq is set iff we came from kernel.
 */
.macro EXCEPTION_PROLOG_1
	mfspr	r11,SPRN_SRR1		/* check whether user or kernel */
	andi.	r11,r11,MSR_PR
	tophys(r11,r1)			/* use tophys(r1) if kernel */
	beq	1f			/* from kernel: frame goes on current stack */
	mfspr	r11,SPRN_SPRG_THREAD	/* from user: get thread_struct */
	lwz	r11,TASK_STACK-THREAD(r11)	/* base of the task's kernel stack */
	addi	r11,r11,THREAD_SIZE	/* point at the top of that stack */
	tophys(r11,r11)
1:	subi	r11,r11,INT_FRAME_SIZE	/* alloc exc. frame */
.endm

/*
 * Prologue, second half: save volatile state into the frame at r11,
 * switch r1 to the (virtual) kernel stack, and (non-40x) re-enable
 * exception taking with the MMU still off.
 * On exit (see the note further down in this file):
 *   r9  = interrupted MSR (SRR1)
 *   r12 = interrupted NIP (SRR0)
 *   r11 = physical frame address, r1 = virtual kernel sp.
 */
.macro EXCEPTION_PROLOG_2
	stw	r10,_CCR(r11)		/* save registers */
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	mfspr	r10,SPRN_SPRG_SCRATCH0	/* original r10, stashed by EXCEPTION_PROLOG */
	stw	r10,GPR10(r11)
	mfspr	r12,SPRN_SPRG_SCRATCH1	/* original r11 */
	stw	r12,GPR11(r11)
	mflr	r10
	stw	r10,_LINK(r11)
	mfspr	r12,SPRN_SRR0		/* read SRR0/SRR1 before exceptions can */
	mfspr	r9,SPRN_SRR1		/* be taken again and overwrite them */
	stw	r1,GPR1(r11)
	stw	r1,0(r11)		/* back-chain to the previous frame */
	tovirt(r1,r11)			/* set new kernel sp */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	stw	r0,GPR0(r11)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,8(r11)		/* marker word below the frame */
	SAVE_4GPRS(3, r11)		/* save r3-r6 */
	SAVE_2GPRS(7, r11)		/* save r7-r8 */
.endm

/*
 * System-call entry.  Runs with translation off (physical addresses)
 * and is always entered from user mode, so it goes straight to the
 * task's kernel stack (no kernel/user check).  Builds an exception
 * frame, records \trapno + 1 as the trap number, switches r1/r2 to
 * kernel stack / current, then RFIs to transfer_to_syscall with the
 * MMU enabled.
 *   \trapno : syscall trap vector number (stored as \trapno + 1 —
 *             presumably the set low bit marks the syscall/"lite"
 *             entry path, cf. EXC_XFER_LITE's n+1; confirm).
 * Clobbers r9-r12; on RFI r1 = virtual kernel sp, r2 = current.
 */
.macro SYSCALL_ENTRY trapno
	mfspr	r12,SPRN_SPRG_THREAD	/* r12 = thread_struct (phys) */
	mfcr	r10
	lwz	r11,TASK_STACK-THREAD(r12)	/* r11 = task's kernel stack */
	mflr	r9
	addi	r11,r11,THREAD_SIZE - INT_FRAME_SIZE	/* frame at stack top */
	rlwinm	r10,r10,0,4,2	/* Clear SO bit in CR */
	tophys(r11,r11)
	stw	r10,_CCR(r11)		/* save registers */
	mfspr	r10,SPRN_SRR0		/* r10 = user NIP */
	stw	r9,_LINK(r11)
	mfspr	r9,SPRN_SRR1		/* r9 = user MSR */
	stw	r1,GPR1(r11)
	stw	r1,0(r11)		/* back-chain to user frame */
	tovirt(r1,r11)			/* set new kernel sp */
	stw	r10,_NIP(r11)
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
	/* SRR0/SRR1 are already copied out above, so it is safe to let
	 * exceptions overwrite them from here on. */
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r9,_MSR(r11)
	li	r2, \trapno + 1
	stw	r10,8(r11)
	stw	r2,_TRAP(r11)
	SAVE_GPR(0, r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
	addi	r11,r1,STACK_FRAME_OVERHEAD	/* r11 = pt_regs pointer */
	addi	r2,r12,-THREAD		/* r2 = 'current' (still physical) */
	stw	r11,PT_REGS(r12)	/* thread_struct: record pt_regs location */
#if defined(CONFIG_40x)
	/* Check to see if the dbcr0 register is set up to debug. Use the
	   internal debug mode bit to do this. */
	lwz	r12,THREAD_DBCR0(r12)
	andis.	r12,r12,DBCR0_IDM@h
#endif
	/* NOTE(review): on 40x the cr0 result of the andis. above must
	 * survive ACCOUNT_CPU_USER_ENTRY for the beq+ below — confirm
	 * that macro does not clobber cr0. */
	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
#if defined(CONFIG_40x)
	beq+	3f			/* DBCR0_IDM clear: skip debug setup */
	/* From user and task is ptraced - load up global dbcr0 */
	li	r12,-1			/* clear all pending debug events */
	mtspr	SPRN_DBSR,r12
	lis	r11,global_dbcr0@ha
	tophys(r11,r11)
	addi	r11,r11,global_dbcr0@l
	lwz	r12,0(r11)
	mtspr	SPRN_DBCR0,r12
	lwz	r12,4(r11)
	addi	r12,r12,-1		/* decrement global_dbcr0[1] counter */
	stw	r12,4(r11)
#endif

3:
	tovirt(r2, r2)			/* set r2 to current */
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this point
	 * otherwise we might risk taking an interrupt before we tell lockdep
	 * they are enabled.
	 */
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
	rlwimi	r10, r9, 0, MSR_EE	/* inherit MSR_EE from the user MSR in r9 */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10		/* MSR the handler will run with */
	mtspr	SPRN_SRR0,r11		/* handler entry point */
	SYNC
	RFI				/* jump to handler, enable MMU */
.endm

/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
/*
 * Place an exception vector at fixed address 'n' with entry label
 * 'label'.  The Book3S variant additionally emits DO_KVM n first
 * (presumably the KVM interception hook — confirm against DO_KVM's
 * definition).
 */
#ifdef CONFIG_PPC_BOOK3S
#define START_EXCEPTION(n, label)	\
	. = n;				\
	DO_KVM n;			\
label:

#else
#define START_EXCEPTION(n, label)	\
	. = n;				\
label:

#endif

/*
 * Define a complete exception vector: emit the vector at address 'n',
 * run the common prologue, point r3 at the pt_regs area, then hand
 * off to C handler 'hdlr' through 'xfer' (EXC_XFER_STD/EXC_XFER_LITE).
 */
#define EXCEPTION(n, label, hdlr, xfer)	\
	START_EXCEPTION(n, label)	\
	EXCEPTION_PROLOG;		\
	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
	xfer(n, hdlr)

/*
 * Common transfer-to-handler tail: store the trap number into the
 * frame, load the MSR the handler will run with into r10, and branch
 * to transfer routine 'tfer'.  The two .long words after the bl are
 * data, not code — presumably fetched via LR by the transfer routine
 * as the C handler address and the return path; confirm against
 * transfer_to_handler.
 */
#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)	\
	li	r10,trap;				\
	stw	r10,_TRAP(r11);				\
	LOAD_REG_IMMEDIATE(r10, msr);			\
	bl	tfer;					\
	.long	hdlr;					\
	.long	ret

/* Standard transfer: trap number n, via transfer_to_handler_full and
 * ret_from_except_full. */
#define EXC_XFER_STD(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
			  ret_from_except_full)

/* Light transfer: via transfer_to_handler and ret_from_except.  Note
 * the trap number is n+1 — presumably the set low bit distinguishes
 * the "lite" path for the transfer/return code; confirm. */
#define EXC_XFER_LITE(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
			  ret_from_except)

#endif /* __HEAD_32_H__ */