/*
 * OpenRISC head.S
 *
 * Linux architectural port borrowing liberally from similar works of
 * others.  All original copyrights apply as per the original source
 * declaration.
 *
 * Modifications for the OpenRISC architecture:
 * Copyright (C) 2003 Matjaz Breskvar <phoenix@bsemi.com>
 * Copyright (C) 2010-2011 Jonas Bonn <jonas@southpole.se>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#include <linux/linkage.h>
#include <linux/threads.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/serial_reg.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cache.h>
#include <asm/spr_defs.h>
#include <asm/asm-offsets.h>
#include <linux/of_fdt.h>

#define tophys(rd,rs)				\
	l.movhi	rd,hi(-KERNELBASE)		;\
	l.add	rd,rd,rs
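
/*
 * Rough illustration (assuming the usual OpenRISC KERNELBASE of
 * 0xc0000000): tophys(r3,r4) with r4 = 0xc0123000 computes
 * 0x40000000 + 0xc0123000 = 0x00123000, i.e. the physical address
 * behind the linear kernel mapping, since hi(-KERNELBASE) = hi(0x40000000).
 */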

#define CLEAR_GPR(gpr)				\
	l.or	gpr,r0,r0

#define LOAD_SYMBOL_2_GPR(gpr,symbol)		\
	l.movhi	gpr,hi(symbol)			;\
	l.ori	gpr,gpr,lo(symbol)


#define UART_BASE_ADD      0x90000000

#define EXCEPTION_SR  (SPR_SR_DME | SPR_SR_IME | SPR_SR_DCE | SPR_SR_ICE | SPR_SR_SM)
#define SYSCALL_SR  (SPR_SR_DME | SPR_SR_IME | SPR_SR_DCE | SPR_SR_ICE | SPR_SR_IEE | SPR_SR_TEE | SPR_SR_SM)
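
/*
 * Reading of the two masks above (derived from the SR bit names):
 * EXCEPTION_SR drops the handler into supervisor mode with both caches
 * and both MMUs enabled but external interrupts and the tick timer
 * masked; SYSCALL_SR is the same plus IEE/TEE, so interrupts and the
 * tick timer stay enabled while handling a syscall.
 */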

/* ============================================[ tmp store locations ]=== */

/*
 * emergency_print temporary stores
 */
#define EMERGENCY_PRINT_STORE_GPR4	l.sw    0x20(r0),r4
#define EMERGENCY_PRINT_LOAD_GPR4	l.lwz   r4,0x20(r0)

#define EMERGENCY_PRINT_STORE_GPR5	l.sw    0x24(r0),r5
#define EMERGENCY_PRINT_LOAD_GPR5	l.lwz   r5,0x24(r0)

#define EMERGENCY_PRINT_STORE_GPR6	l.sw    0x28(r0),r6
#define EMERGENCY_PRINT_LOAD_GPR6	l.lwz   r6,0x28(r0)

#define EMERGENCY_PRINT_STORE_GPR7	l.sw    0x2c(r0),r7
#define EMERGENCY_PRINT_LOAD_GPR7	l.lwz   r7,0x2c(r0)

#define EMERGENCY_PRINT_STORE_GPR8	l.sw    0x30(r0),r8
#define EMERGENCY_PRINT_LOAD_GPR8	l.lwz   r8,0x30(r0)

#define EMERGENCY_PRINT_STORE_GPR9	l.sw    0x34(r0),r9
#define EMERGENCY_PRINT_LOAD_GPR9	l.lwz   r9,0x34(r0)


/*
 * TLB miss handlers temporary stores
 */
#define EXCEPTION_STORE_GPR9		l.sw    0x10(r0),r9
#define EXCEPTION_LOAD_GPR9		l.lwz   r9,0x10(r0)

#define EXCEPTION_STORE_GPR2		l.sw    0x64(r0),r2
#define EXCEPTION_LOAD_GPR2		l.lwz   r2,0x64(r0)

#define EXCEPTION_STORE_GPR3		l.sw    0x68(r0),r3
#define EXCEPTION_LOAD_GPR3		l.lwz   r3,0x68(r0)

#define EXCEPTION_STORE_GPR4		l.sw    0x6c(r0),r4
#define EXCEPTION_LOAD_GPR4		l.lwz   r4,0x6c(r0)

#define EXCEPTION_STORE_GPR5		l.sw    0x70(r0),r5
#define EXCEPTION_LOAD_GPR5		l.lwz   r5,0x70(r0)

#define EXCEPTION_STORE_GPR6		l.sw    0x74(r0),r6
#define EXCEPTION_LOAD_GPR6		l.lwz   r6,0x74(r0)


/*
 * EXCEPTION_HANDLE temporary stores
 */

#define EXCEPTION_T_STORE_GPR30		l.sw    0x78(r0),r30
#define EXCEPTION_T_LOAD_GPR30(reg)	l.lwz   reg,0x78(r0)

#define EXCEPTION_T_STORE_GPR10		l.sw    0x7c(r0),r10
#define EXCEPTION_T_LOAD_GPR10(reg)	l.lwz   reg,0x7c(r0)

#define EXCEPTION_T_STORE_SP		l.sw    0x80(r0),r1
#define EXCEPTION_T_LOAD_SP(reg)	l.lwz   reg,0x80(r0)

/*
 * For UNHANDLED_EXCEPTION
 */

#define EXCEPTION_T_STORE_GPR31		l.sw    0x84(r0),r31
#define EXCEPTION_T_LOAD_GPR31(reg)	l.lwz   reg,0x84(r0)

/* =========================================================[ macros ]=== */


#define GET_CURRENT_PGD(reg,t1)					\
	LOAD_SYMBOL_2_GPR(reg,current_pgd)			;\
	tophys  (t1,reg)					;\
	l.lwz   reg,0(t1)
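
/*
 * Descriptive note: the handlers that use this macro run with address
 * translation off, so the virtual address of current_pgd is first
 * converted with tophys() and the pointer is fetched through its
 * physical alias.
 */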


/*
 * DSCR: this is a common hook for handling exceptions. it will save
 *       the needed registers, set up stack and pointer to current
 *       then jump to the handler while enabling MMU
 *
 * PRMS: handler	- a function to jump to. it has to save the
 *			remaining registers to kernel stack, call
 *			appropriate arch-independent exception handler
 *			and finally jump to ret_from_except
 *
 * PREQ: unchanged state from the time exception happened
 *
 * POST: SAVED the original values of the following registers
 *	 to the newly created exception frame pointed to by r1
 *
 *	 r1  - ksp	pointing to the new (exception) frame
 *	 r4  - EEAR	exception EA
 *	 r10 - current	pointing to current_thread_info struct
 *	 r12 - syscall	0, since we didn't come from syscall
 *	 r13 - temp	it actually contains new SR, not needed anymore
 *	 r31 - handler	address of the handler we'll jump to
 *
 *	 handler has to save remaining registers to the exception
 *	 ksp frame *before* tainting them!
 *
 * NOTE: this function is not reentrant per se. reentrancy is guaranteed
 *	 by the processor disabling all exceptions/interrupts when an
 *	 exception occurs.
 *
 * OPTM: no need to make it so wasteful to extract ksp when in user mode
 */

#define EXCEPTION_HANDLE(handler)				\
	EXCEPTION_T_STORE_GPR30					;\
	l.mfspr r30,r0,SPR_ESR_BASE				;\
	l.andi  r30,r30,SPR_SR_SM				;\
	l.sfeqi r30,0						;\
	EXCEPTION_T_STORE_GPR10					;\
	l.bnf   2f                       /* kernel_mode */	;\
	 EXCEPTION_T_STORE_SP            /* delay slot */	;\
1: /* user_mode:   */						;\
	LOAD_SYMBOL_2_GPR(r1,current_thread_info_set)		;\
	tophys  (r30,r1)					;\
	/* r10: current_thread_info  */				;\
	l.lwz   r10,0(r30)					;\
	tophys  (r30,r10)					;\
	l.lwz   r1,(TI_KSP)(r30)				;\
	/* fall through */					;\
2: /* kernel_mode: */						;\
	/* create new stack frame, save only needed gprs */	;\
	/* r1: KSP, r10: current, r4: EEAR, r31: __pa(KSP) */	;\
	/* r12: temp, syscall indicator */			;\
	l.addi  r1,r1,-(INT_FRAME_SIZE)				;\
	/* r1 is KSP, r30 is __pa(KSP) */			;\
	tophys  (r30,r1)					;\
	l.sw    PT_GPR12(r30),r12				;\
	l.mfspr r12,r0,SPR_EPCR_BASE				;\
	l.sw    PT_PC(r30),r12					;\
	l.mfspr r12,r0,SPR_ESR_BASE				;\
	l.sw    PT_SR(r30),r12					;\
	/* save r30 */						;\
	EXCEPTION_T_LOAD_GPR30(r12)				;\
	l.sw    PT_GPR30(r30),r12				;\
	/* save r10 as was prior to exception */		;\
	EXCEPTION_T_LOAD_GPR10(r12)				;\
	l.sw    PT_GPR10(r30),r12				;\
	/* save PT_SP as was prior to exception */		;\
	EXCEPTION_T_LOAD_SP(r12)				;\
	l.sw    PT_SP(r30),r12					;\
	/* save exception r4, set r4 = EA */			;\
	l.sw    PT_GPR4(r30),r4					;\
	l.mfspr r4,r0,SPR_EEAR_BASE				;\
	/* r12 == 1 if we come from syscall */			;\
	CLEAR_GPR(r12)						;\
	/* ----- turn on MMU ----- */				;\
	l.ori   r30,r0,(EXCEPTION_SR)				;\
	l.mtspr r0,r30,SPR_ESR_BASE				;\
	/* r30:	EA address of handler */			;\
	LOAD_SYMBOL_2_GPR(r30,handler)				;\
	l.mtspr r0,r30,SPR_EPCR_BASE				;\
	l.rfe

/*
 * this doesn't work
 *
 *
 * #ifdef CONFIG_JUMP_UPON_UNHANDLED_EXCEPTION
 * #define UNHANDLED_EXCEPTION(handler)				\
 *	l.ori   r3,r0,0x1					;\
 *	l.mtspr r0,r3,SPR_SR					;\
 *	l.movhi r3,hi(0xf0000100)				;\
 *	l.ori   r3,r3,lo(0xf0000100)				;\
 *	l.jr    r3						;\
 *	l.nop	1
 *
 * #endif
 */

/* DSCR: this is the same as EXCEPTION_HANDLE(), we are just
 *       a bit more careful (if we have a PT_SP or current pointer
 *       corruption) and set them up from 'current_set'
 *
 */
#define UNHANDLED_EXCEPTION(handler)				\
	EXCEPTION_T_STORE_GPR31					;\
	EXCEPTION_T_STORE_GPR10					;\
	EXCEPTION_T_STORE_SP					;\
	/* temporary store r3, r9 into r1, r10 */		;\
	l.addi  r1,r3,0x0					;\
	l.addi  r10,r9,0x0					;\
	/* the string referenced by r3 must be low enough */	;\
	l.jal   _emergency_print				;\
	l.ori   r3,r0,lo(_string_unhandled_exception)		;\
	l.mfspr r3,r0,SPR_NPC					;\
	l.jal   _emergency_print_nr				;\
	l.andi  r3,r3,0x1f00					;\
	/* the string referenced by r3 must be low enough */	;\
	l.jal   _emergency_print				;\
	l.ori   r3,r0,lo(_string_epc_prefix)			;\
	l.jal   _emergency_print_nr				;\
	l.mfspr r3,r0,SPR_EPCR_BASE				;\
	l.jal   _emergency_print				;\
	l.ori   r3,r0,lo(_string_nl)				;\
	/* end of printing */					;\
	l.addi  r3,r1,0x0					;\
	l.addi  r9,r10,0x0					;\
	/* extract current, ksp from current_set */		;\
	LOAD_SYMBOL_2_GPR(r1,_unhandled_stack_top)		;\
	LOAD_SYMBOL_2_GPR(r10,init_thread_union)		;\
	/* create new stack frame, save only needed gprs */	;\
	/* r1: KSP, r10: current, r31: __pa(KSP) */		;\
	/* r12: temp, syscall indicator, r13 temp */		;\
	l.addi  r1,r1,-(INT_FRAME_SIZE)				;\
	/* r1 is KSP, r31 is __pa(KSP) */			;\
	tophys  (r31,r1)					;\
	l.sw    PT_GPR12(r31),r12				;\
	l.mfspr r12,r0,SPR_EPCR_BASE				;\
	l.sw    PT_PC(r31),r12					;\
	l.mfspr r12,r0,SPR_ESR_BASE				;\
	l.sw    PT_SR(r31),r12					;\
	/* save r31 */						;\
	EXCEPTION_T_LOAD_GPR31(r12)				;\
	l.sw    PT_GPR31(r31),r12				;\
	/* save r10 as was prior to exception */		;\
	EXCEPTION_T_LOAD_GPR10(r12)				;\
	l.sw    PT_GPR10(r31),r12				;\
	/* save PT_SP as was prior to exception */		;\
	EXCEPTION_T_LOAD_SP(r12)				;\
	l.sw    PT_SP(r31),r12					;\
	l.sw    PT_GPR13(r31),r13				;\
	/* --> */						;\
	/* save exception r4, set r4 = EA */			;\
	l.sw    PT_GPR4(r31),r4					;\
	l.mfspr r4,r0,SPR_EEAR_BASE				;\
	/* r12 == 1 if we come from syscall */			;\
	CLEAR_GPR(r12)						;\
	/* ----- play a MMU trick ----- */			;\
	l.ori   r31,r0,(EXCEPTION_SR)				;\
	l.mtspr r0,r31,SPR_ESR_BASE				;\
	/* r31:	EA address of handler */			;\
	LOAD_SYMBOL_2_GPR(r31,handler)				;\
	l.mtspr r0,r31,SPR_EPCR_BASE				;\
	l.rfe

/* =====================================================[ exceptions] === */

/* ---[ 0x100: RESET exception ]----------------------------------------- */
	.org 0x100
	/* Jump to .init code at _start which lives in the .head section
	 * and will be discarded after boot.
	 */
	LOAD_SYMBOL_2_GPR(r15, _start)
	tophys	(r13,r15)			/* MMU disabled */
	l.jr	r13
	 l.nop

/* ---[ 0x200: BUS exception ]------------------------------------------- */
	.org 0x200
_dispatch_bus_fault:
	EXCEPTION_HANDLE(_bus_fault_handler)

/* ---[ 0x300: Data Page Fault exception ]------------------------------- */
	.org 0x300
_dispatch_do_dpage_fault:
//	totally disable timer interrupt
//	l.mtspr	r0,r0,SPR_TTMR
//	DEBUG_TLB_PROBE(0x300)
//	EXCEPTION_DEBUG_VALUE_ER_ENABLED(0x300)
	EXCEPTION_HANDLE(_data_page_fault_handler)

/* ---[ 0x400: Insn Page Fault exception ]------------------------------- */
	.org 0x400
_dispatch_do_ipage_fault:
//	totally disable timer interrupt
//	l.mtspr	r0,r0,SPR_TTMR
//	DEBUG_TLB_PROBE(0x400)
//	EXCEPTION_DEBUG_VALUE_ER_ENABLED(0x400)
	EXCEPTION_HANDLE(_insn_page_fault_handler)

/* ---[ 0x500: Timer exception ]----------------------------------------- */
	.org 0x500
	EXCEPTION_HANDLE(_timer_handler)

/* ---[ 0x600: Alignment exception ]------------------------------------- */
	.org 0x600
	EXCEPTION_HANDLE(_alignment_handler)

/* ---[ 0x700: Illegal insn exception ]---------------------------------- */
	.org 0x700
	EXCEPTION_HANDLE(_illegal_instruction_handler)

/* ---[ 0x800: External interrupt exception ]---------------------------- */
	.org 0x800
	EXCEPTION_HANDLE(_external_irq_handler)

/* ---[ 0x900: DTLB miss exception ]------------------------------------- */
	.org 0x900
	l.j	boot_dtlb_miss_handler
	l.nop

/* ---[ 0xa00: ITLB miss exception ]------------------------------------- */
	.org 0xa00
	l.j	boot_itlb_miss_handler
	l.nop

/* ---[ 0xb00: Range exception ]----------------------------------------- */
	.org 0xb00
	UNHANDLED_EXCEPTION(_vector_0xb00)

/* ---[ 0xc00: Syscall exception ]--------------------------------------- */
	.org 0xc00
	EXCEPTION_HANDLE(_sys_call_handler)

/* ---[ 0xd00: Trap exception ]------------------------------------------ */
	.org 0xd00
	UNHANDLED_EXCEPTION(_vector_0xd00)

/* ---[ 0xe00: Trap exception ]------------------------------------------ */
	.org 0xe00
//	UNHANDLED_EXCEPTION(_vector_0xe00)
	EXCEPTION_HANDLE(_trap_handler)

/* ---[ 0xf00: Reserved exception ]-------------------------------------- */
	.org 0xf00
	UNHANDLED_EXCEPTION(_vector_0xf00)

/* ---[ 0x1000: Reserved exception ]------------------------------------- */
	.org 0x1000
	UNHANDLED_EXCEPTION(_vector_0x1000)

/* ---[ 0x1100: Reserved exception ]------------------------------------- */
	.org 0x1100
	UNHANDLED_EXCEPTION(_vector_0x1100)

/* ---[ 0x1200: Reserved exception ]------------------------------------- */
	.org 0x1200
	UNHANDLED_EXCEPTION(_vector_0x1200)

/* ---[ 0x1300: Reserved exception ]------------------------------------- */
	.org 0x1300
	UNHANDLED_EXCEPTION(_vector_0x1300)

/* ---[ 0x1400: Reserved exception ]------------------------------------- */
	.org 0x1400
	UNHANDLED_EXCEPTION(_vector_0x1400)

/* ---[ 0x1500: Reserved exception ]------------------------------------- */
	.org 0x1500
	UNHANDLED_EXCEPTION(_vector_0x1500)

/* ---[ 0x1600: Reserved exception ]------------------------------------- */
	.org 0x1600
	UNHANDLED_EXCEPTION(_vector_0x1600)

/* ---[ 0x1700: Reserved exception ]------------------------------------- */
	.org 0x1700
	UNHANDLED_EXCEPTION(_vector_0x1700)

/* ---[ 0x1800: Reserved exception ]------------------------------------- */
	.org 0x1800
	UNHANDLED_EXCEPTION(_vector_0x1800)

/* ---[ 0x1900: Reserved exception ]------------------------------------- */
	.org 0x1900
	UNHANDLED_EXCEPTION(_vector_0x1900)

/* ---[ 0x1a00: Reserved exception ]------------------------------------- */
	.org 0x1a00
	UNHANDLED_EXCEPTION(_vector_0x1a00)

/* ---[ 0x1b00: Reserved exception ]------------------------------------- */
	.org 0x1b00
	UNHANDLED_EXCEPTION(_vector_0x1b00)

/* ---[ 0x1c00: Reserved exception ]------------------------------------- */
	.org 0x1c00
	UNHANDLED_EXCEPTION(_vector_0x1c00)

/* ---[ 0x1d00: Reserved exception ]------------------------------------- */
	.org 0x1d00
	UNHANDLED_EXCEPTION(_vector_0x1d00)

/* ---[ 0x1e00: Reserved exception ]------------------------------------- */
	.org 0x1e00
	UNHANDLED_EXCEPTION(_vector_0x1e00)

/* ---[ 0x1f00: Reserved exception ]------------------------------------- */
	.org 0x1f00
	UNHANDLED_EXCEPTION(_vector_0x1f00)

	.org 0x2000
/* ===================================================[ kernel start ]=== */

/*    .text*/

/* This early stuff belongs in HEAD, but some of the functions below definitely
 * don't... */

	__HEAD
	.global _start
_start:
	/* save kernel parameters */
	l.or	r25,r0,r3	/* pointer to fdt */

	/*
	 * ensure a deterministic start
	 */

	l.ori	r3,r0,0x1
	l.mtspr	r0,r3,SPR_SR

	CLEAR_GPR(r1)
	CLEAR_GPR(r2)
	CLEAR_GPR(r3)
	CLEAR_GPR(r4)
	CLEAR_GPR(r5)
	CLEAR_GPR(r6)
	CLEAR_GPR(r7)
	CLEAR_GPR(r8)
	CLEAR_GPR(r9)
	CLEAR_GPR(r10)
	CLEAR_GPR(r11)
	CLEAR_GPR(r12)
	CLEAR_GPR(r13)
	CLEAR_GPR(r14)
	CLEAR_GPR(r15)
	CLEAR_GPR(r16)
	CLEAR_GPR(r17)
	CLEAR_GPR(r18)
	CLEAR_GPR(r19)
	CLEAR_GPR(r20)
	CLEAR_GPR(r21)
	CLEAR_GPR(r22)
	CLEAR_GPR(r23)
	CLEAR_GPR(r24)
	CLEAR_GPR(r26)
	CLEAR_GPR(r27)
	CLEAR_GPR(r28)
	CLEAR_GPR(r29)
	CLEAR_GPR(r30)
	CLEAR_GPR(r31)

	/*
	 * set up initial ksp and current
	 */
	LOAD_SYMBOL_2_GPR(r1,init_thread_union+0x2000)	// setup kernel stack
	LOAD_SYMBOL_2_GPR(r10,init_thread_union)	// setup current
	tophys	(r31,r10)
	l.sw	TI_KSP(r31), r1
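	/* Descriptive note: r25 is deliberately left out of the clearing
	 * above because it still holds the device tree pointer saved at
	 * _start.  The initial stack pointer is the top of the 8 KiB
	 * init_thread_union (hence the +0x2000), and it is recorded in
	 * thread_info->ksp through the physical alias since the MMU is
	 * still off here.
	 */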

	l.ori	r4,r0,0x0


	/*
	 * .data contains initialized data,
	 * .bss contains uninitialized data - clear it up
	 */
clear_bss:
	LOAD_SYMBOL_2_GPR(r24, __bss_start)
	LOAD_SYMBOL_2_GPR(r26, _end)
	tophys(r28,r24)
	tophys(r30,r26)
	CLEAR_GPR(r24)
	CLEAR_GPR(r26)
1:
	l.sw    (0)(r28),r0
	l.sfltu r28,r30
	l.bf    1b
	l.addi  r28,r28,4

enable_ic:
	l.jal	_ic_enable
	 l.nop

enable_dc:
	l.jal	_dc_enable
	 l.nop

flush_tlb:
	/*
	 *  I N V A L I D A T E   T L B   e n t r i e s
	 */
	LOAD_SYMBOL_2_GPR(r5,SPR_DTLBMR_BASE(0))
	LOAD_SYMBOL_2_GPR(r6,SPR_ITLBMR_BASE(0))
	l.addi	r7,r0,128 /* Maximum number of sets */
1:
	l.mtspr	r5,r0,0x0
	l.mtspr	r6,r0,0x0

	l.addi	r5,r5,1
	l.addi	r6,r6,1
	l.sfeq	r7,r0
	l.bnf	1b
	 l.addi	r7,r7,-1
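
	/* Note: the loop above simply writes zero into the first 128
	 * way-0 match registers of both TLBs, clearing their valid bits;
	 * 128 covers the maximum number of sets used here.
	 */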


/* The MMU needs to be enabled before or32_early_setup is called */

enable_mmu:
	/*
	 * enable dmmu & immu
	 * by setting the DME and IME bits (bits 5 and 6) of SR
	 */
	l.mfspr	r30,r0,SPR_SR
	l.movhi	r28,hi(SPR_SR_DME | SPR_SR_IME)
	l.ori	r28,r28,lo(SPR_SR_DME | SPR_SR_IME)
	l.or	r30,r30,r28
	l.mtspr	r0,r30,SPR_SR
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop

	// reset the simulation counters
	l.nop 5

	/* check fdt header magic word */
	l.lwz	r3,0(r25)	/* load magic from fdt into r3 */
	l.movhi	r4,hi(OF_DT_HEADER)
	l.ori	r4,r4,lo(OF_DT_HEADER)
	l.sfeq	r3,r4
	l.bf	_fdt_found
	 l.nop
	/* magic number mismatch, set fdt pointer to null */
	l.or	r25,r0,r0
_fdt_found:
	/* pass fdt pointer to or32_early_setup in r3 */
	l.or	r3,r0,r25
	LOAD_SYMBOL_2_GPR(r24, or32_early_setup)
	l.jalr	r24
	 l.nop

clear_regs:
	/*
	 * clear all GPRS to increase determinism
	 */
	CLEAR_GPR(r2)
	CLEAR_GPR(r3)
	CLEAR_GPR(r4)
	CLEAR_GPR(r5)
	CLEAR_GPR(r6)
	CLEAR_GPR(r7)
	CLEAR_GPR(r8)
	CLEAR_GPR(r9)
	CLEAR_GPR(r11)
	CLEAR_GPR(r12)
	CLEAR_GPR(r13)
	CLEAR_GPR(r14)
	CLEAR_GPR(r15)
	CLEAR_GPR(r16)
	CLEAR_GPR(r17)
	CLEAR_GPR(r18)
	CLEAR_GPR(r19)
	CLEAR_GPR(r20)
	CLEAR_GPR(r21)
	CLEAR_GPR(r22)
	CLEAR_GPR(r23)
	CLEAR_GPR(r24)
	CLEAR_GPR(r25)
	CLEAR_GPR(r26)
	CLEAR_GPR(r27)
	CLEAR_GPR(r28)
	CLEAR_GPR(r29)
	CLEAR_GPR(r30)
	CLEAR_GPR(r31)

jump_start_kernel:
	/*
	 * jump to kernel entry (start_kernel)
	 */
	LOAD_SYMBOL_2_GPR(r30, start_kernel)
	l.jr    r30
	 l.nop

/* ========================================[ cache ]=== */

	/* alignment here so we don't change memory offsets with
	 * memory controller defined
	 */
	.align 0x2000

_ic_enable:
	/* Check if IC present and skip enabling otherwise */
	l.mfspr r24,r0,SPR_UPR
	l.andi  r26,r24,SPR_UPR_ICP
	l.sfeq  r26,r0
	l.bf	9f
	l.nop

	/* Disable IC */
	l.mfspr r6,r0,SPR_SR
	l.addi  r5,r0,-1
	l.xori  r5,r5,SPR_SR_ICE
	l.and   r5,r6,r5
	l.mtspr r0,r5,SPR_SR

	/* Establish cache block size
	   If BS=0, 16;
	   If BS=1, 32;
	   r14 contains the block size
	*/
	l.mfspr r24,r0,SPR_ICCFGR
	l.andi	r26,r24,SPR_ICCFGR_CBS
	l.srli	r28,r26,7
	l.ori	r30,r0,16
	l.sll	r14,r30,r28
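	/* Worked example of the CBS handling above: if the CBS bit
	 * (bit 7 of ICCFGR) reads 1, then r28 = 1 and
	 * r14 = 16 << 1 = 32-byte cache blocks; CBS = 0 gives 16 bytes.
	 */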

	/* Establish number of cache sets
	   r16 contains number of cache sets
	   r28 contains log(# of cache sets)
	*/
	l.andi  r26,r24,SPR_ICCFGR_NCS
	l.srli	r28,r26,3
	l.ori   r30,r0,1
	l.sll   r16,r30,r28
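	/* Worked example: the NCS field (bits 3-6 of ICCFGR) holds
	 * log2(number of sets), so e.g. NCS = 9 yields r16 = 1 << 9 = 512
	 * sets.  Together with r14 this describes one cache way, which is
	 * exactly the bound used by the invalidation loop below
	 * (r5 = block size << log2(sets)).
	 */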

	/* Invalidate IC */
	l.addi	r6,r0,0
	l.sll	r5,r14,r28
//	l.mul	r5,r14,r16
//	l.trap	1
//	l.addi	r5,r0,IC_SIZE
1:
	l.mtspr	r0,r6,SPR_ICBIR
	l.sfne	r6,r5
	l.bf	1b
	l.add	r6,r6,r14
//	l.addi	r6,r6,IC_LINE

	/* Enable IC */
	l.mfspr	r6,r0,SPR_SR
	l.ori	r6,r6,SPR_SR_ICE
	l.mtspr	r0,r6,SPR_SR
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
	l.nop
9:
	l.jr    r9
	l.nop

_dc_enable:
	/* Check if DC present and skip enabling otherwise */
	l.mfspr r24,r0,SPR_UPR
	l.andi  r26,r24,SPR_UPR_DCP
	l.sfeq  r26,r0
	l.bf	9f
	l.nop

	/* Disable DC */
	l.mfspr r6,r0,SPR_SR
	l.addi  r5,r0,-1
	l.xori  r5,r5,SPR_SR_DCE
	l.and   r5,r6,r5
	l.mtspr r0,r5,SPR_SR

	/* Establish cache block size
	   If BS=0, 16;
	   If BS=1, 32;
	   r14 contains the block size
	*/
	l.mfspr r24,r0,SPR_DCCFGR
	l.andi	r26,r24,SPR_DCCFGR_CBS
	l.srli	r28,r26,7
	l.ori	r30,r0,16
	l.sll	r14,r30,r28

	/* Establish number of cache sets
	   r16 contains number of cache sets
	   r28 contains log(# of cache sets)
	*/
	l.andi  r26,r24,SPR_DCCFGR_NCS
	l.srli	r28,r26,3
	l.ori   r30,r0,1
	l.sll   r16,r30,r28
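	/* Same derivation as in _ic_enable above, only read from DCCFGR;
	 * e.g. CBS = 1 and NCS = 9 would describe a cache with 32-byte
	 * blocks and 512 sets per way.
	 */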

	/* Invalidate DC */
	l.addi	r6,r0,0
	l.sll	r5,r14,r28
1:
	l.mtspr	r0,r6,SPR_DCBIR
	l.sfne	r6,r5
	l.bf	1b
	l.add	r6,r6,r14

	/* Enable DC */
	l.mfspr	r6,r0,SPR_SR
	l.ori	r6,r6,SPR_SR_DCE
	l.mtspr	r0,r6,SPR_SR
9:
	l.jr    r9
	l.nop

/* ===============================================[ page table masks ]=== */

/* bit 4 is used in hardware as write back cache bit. we never use this bit
 * explicitly, so we can reuse it as _PAGE_FILE bit and mask it out when
 * writing into hardware pte's
 */

#define DTLB_UP_CONVERT_MASK  0x3fa
#define ITLB_UP_CONVERT_MASK  0x3a

/* for SMP we'd have (this is a bit subtle, CC must be always set
 * for SMP, but since we have _PAGE_PRESENT bit always defined
 * we can just modify the mask)
 */
#define DTLB_SMP_CONVERT_MASK  0x3fb
#define ITLB_SMP_CONVERT_MASK  0x3b

/* ---[ boot dtlb miss handler ]----------------------------------------- */

boot_dtlb_miss_handler:

/* mask for DTLB_MR register: - (0) sets V (valid) bit,
 *                            - (31-12) sets bits belonging to VPN (31-12)
 */
#define DTLB_MR_MASK	0xfffff001

/* mask for DTLB_TR register: - (1) sets CI (cache inhibit) bit,
 *                            - (4) sets A (access) bit,
 *                            - (5) sets D (dirty) bit,
 *                            - (8) sets SRE (superuser read) bit
 *                            - (9) sets SWE (superuser write) bit
 *                            - (31-12) sets bits belonging to VPN (31-12)
 */
#define DTLB_TR_MASK	0xfffff332

/* These are for masking out the VPN/PPN value from the MR/TR registers...
 * it's not the same as the PFN */
#define VPN_MASK	0xfffff000
#define PPN_MASK	0xfffff000


	EXCEPTION_STORE_GPR6

#if 0
	l.mfspr r6,r0,SPR_ESR_BASE	   //
	l.andi  r6,r6,SPR_SR_SM            // are we in kernel mode ?
	l.sfeqi r6,0                       // r6 == 0x1 --> SM
	l.bf    exit_with_no_dtranslation  //
	l.nop
#endif

	/* this could be optimized by moving storing of
	 * non r6 registers here, and jumping r6 restore
	 * if not in supervisor mode
	 */

	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	EXCEPTION_STORE_GPR5

	l.mfspr r4,r0,SPR_EEAR_BASE        // get the offending EA

immediate_translation:
	CLEAR_GPR(r6)

	l.srli	r3,r4,0xd	// r3 <- r4 / 8192 (sets are relative to page size (8Kb) NOT VPN size (4Kb))

	l.mfspr r6, r0, SPR_DMMUCFGR
	l.andi	r6, r6, SPR_DMMUCFGR_NTS
	l.srli	r6, r6, SPR_DMMUCFGR_NTS_OFF
	l.ori	r5, r0, 0x1
	l.sll	r5, r5, r6	// r5 = number DMMU sets
	l.addi	r6, r5, -1	// r6 = nsets mask
	l.and	r2, r3, r6	// r2 <- r3 % NSETS_MASK
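	/* Worked example: with a DMMU that reports 64 sets, r6 ends up as
	 * 0x3f and the TLB set index in r2 is simply (EA >> 13) & 0x3f,
	 * i.e. the effective address divided by the 8 KiB page size,
	 * modulo the number of sets.
	 */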

	l.or    r6,r6,r4	// r6 <- r4
	l.ori   r6,r6,~(VPN_MASK)	// r6 <- VPN :VPN .xfff - clear up lo(r6) to 0x**** *fff
	l.movhi r5,hi(DTLB_MR_MASK)	// r5 <- ffff:0000.x000
	l.ori   r5,r5,lo(DTLB_MR_MASK)	// r5 <- ffff:1111.x001 - apply DTLB_MR_MASK
	l.and   r5,r5,r6	// r5 <- VPN :VPN .x001 - we have DTLBMR entry
	l.mtspr r2,r5,SPR_DTLBMR_BASE(0)	// set DTLBMR

	/* set up DTLB with no translation for EA <= 0xbfffffff */
	LOAD_SYMBOL_2_GPR(r6,0xbfffffff)
	l.sfgeu  r6,r4			// flag if r6 >= r4 (if 0xbfffffff >= EA)
	l.bf     1f			// goto out
	l.and    r3,r4,r4		// delay slot :: r3 <- r4 (if flag==1)

	tophys(r3,r4)			// r3 <- PA
1:
	l.ori   r3,r3,~(PPN_MASK)	// r3 <- PPN :PPN .xfff - clear up lo(r3) to 0x**** *fff
	l.movhi r5,hi(DTLB_TR_MASK)	// r5 <- ffff:0000.x000
	l.ori   r5,r5,lo(DTLB_TR_MASK)	// r5 <- ffff:1111.x330 - apply DTLB_TR_MASK
	l.and   r5,r5,r3	// r5 <- PPN :PPN .x330 - we have DTLBTR entry
	l.mtspr r2,r5,SPR_DTLBTR_BASE(0)	// set DTLBTR

	EXCEPTION_LOAD_GPR6
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR2

	l.rfe				// SR <- ESR, PC <- EPC

exit_with_no_dtranslation:
	/* EA out of memory or not in supervisor mode */
	EXCEPTION_LOAD_GPR6
	EXCEPTION_LOAD_GPR4
	l.j	_dispatch_bus_fault

/* ---[ boot itlb miss handler ]----------------------------------------- */

boot_itlb_miss_handler:

/* mask for ITLB_MR register: - sets V (valid) bit,
 *                            - sets bits belonging to VPN (31-12)
 */
#define ITLB_MR_MASK	0xfffff001

/* mask for ITLB_TR register: - sets A (access) bit,
 *                            - sets SXE (superuser execute) bit
 *                            - sets bits belonging to VPN (31-12)
 */
#define ITLB_TR_MASK	0xfffff050

/*
#define VPN_MASK	0xffffe000
#define PPN_MASK	0xffffe000
*/



	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	EXCEPTION_STORE_GPR5
	EXCEPTION_STORE_GPR6

#if 0
	l.mfspr r6,r0,SPR_ESR_BASE	   //
	l.andi  r6,r6,SPR_SR_SM            // are we in kernel mode ?
	l.sfeqi r6,0                       // r6 == 0x1 --> SM
	l.bf    exit_with_no_itranslation
	l.nop
#endif


	l.mfspr r4,r0,SPR_EEAR_BASE        // get the offending EA

earlyearly:
	CLEAR_GPR(r6)

	l.srli	r3,r4,0xd	// r3 <- r4 / 8192 (sets are relative to page size (8Kb) NOT VPN size (4Kb))

	l.mfspr r6, r0, SPR_IMMUCFGR
	l.andi	r6, r6, SPR_IMMUCFGR_NTS
	l.srli	r6, r6, SPR_IMMUCFGR_NTS_OFF
	l.ori	r5, r0, 0x1
	l.sll	r5, r5, r6	// r5 = number IMMU sets from IMMUCFGR
	l.addi	r6, r5, -1	// r6 = nsets mask
	l.and	r2, r3, r6	// r2 <- r3 % NSETS_MASK

	l.or    r6,r6,r4	// r6 <- r4
	l.ori   r6,r6,~(VPN_MASK)	// r6 <- VPN :VPN .xfff - clear up lo(r6) to 0x**** *fff
	l.movhi r5,hi(ITLB_MR_MASK)	// r5 <- ffff:0000.x000
	l.ori   r5,r5,lo(ITLB_MR_MASK)	// r5 <- ffff:1111.x001 - apply ITLB_MR_MASK
	l.and   r5,r5,r6	// r5 <- VPN :VPN .x001 - we have ITLBMR entry
	l.mtspr r2,r5,SPR_ITLBMR_BASE(0)	// set ITLBMR

	/*
	 * set up ITLB with no translation for EA <= 0x0fffffff
	 *
	 * we need this for head.S mapping (EA = PA). if we move all functions
	 * which run with mmu enabled into entry.S, we might be able to eliminate this.
	 *
	 */
	LOAD_SYMBOL_2_GPR(r6,0x0fffffff)
	l.sfgeu  r6,r4			// flag if r6 >= r4 (if 0x0fffffff >= EA)
	l.bf     1f			// goto out
	l.and    r3,r4,r4		// delay slot :: r3 <- r4 (if flag==1)

	tophys(r3,r4)			// r3 <- PA
1:
	l.ori   r3,r3,~(PPN_MASK)	// r3 <- PPN :PPN .xfff - clear up lo(r3) to 0x**** *fff
	l.movhi r5,hi(ITLB_TR_MASK)	// r5 <- ffff:0000.x000
	l.ori   r5,r5,lo(ITLB_TR_MASK)	// r5 <- ffff:1111.x050 - apply ITLB_TR_MASK
	l.and   r5,r5,r3	// r5 <- PPN :PPN .x050 - we have ITLBTR entry
	l.mtspr r2,r5,SPR_ITLBTR_BASE(0)	// set ITLBTR

	EXCEPTION_LOAD_GPR6
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR2

	l.rfe				// SR <- ESR, PC <- EPC

exit_with_no_itranslation:
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR6
	l.j	_dispatch_bus_fault
	l.nop

/* ====================================================================== */
/*
 * Stuff below here shouldn't go into .head section... maybe this stuff
 * can be moved to entry.S ???
 */

/* ==============================================[ DTLB miss handler ]=== */

/*
 * Comments:
 *   Exception handlers are entered with MMU off so the following handler
 *   needs to use physical addressing
 *
 */

	.text
ENTRY(dtlb_miss_handler)
	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	EXCEPTION_STORE_GPR5
	EXCEPTION_STORE_GPR6
	/*
	 * get EA of the miss
	 */
	l.mfspr	r2,r0,SPR_EEAR_BASE
	/*
	 * pmd = (pmd_t *)(current_pgd + pgd_index(daddr));
	 */
	GET_CURRENT_PGD(r3,r5)		// r3 is current_pgd, r5 is temp
	l.srli	r4,r2,0x18		// >> PAGE_SHIFT + (PAGE_SHIFT - 2)
	l.slli	r4,r4,0x2		// to get address << 2
	l.add	r5,r4,r3		// r4 is pgd_index(daddr)
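	/* Descriptive note: 0x18 = PAGE_SHIFT + (PAGE_SHIFT - 2) = 24 for
	 * the 8 KiB pages used here, so the top 8 bits of the EA select
	 * one of the 256 pgd entries; the << 2 turns that index into the
	 * byte offset of a 4-byte entry, leaving r5 pointing at the pgd
	 * slot for this address.
	 */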
	/*
	 * if (pmd_none(*pmd))
	 *   goto pmd_none:
	 */
	tophys	(r4,r5)
	l.lwz	r3,0x0(r4)		// get *pmd value
	l.sfne	r3,r0
	l.bnf	d_pmd_none
	 l.andi	r3,r3,~PAGE_MASK	//0x1fff // ~PAGE_MASK
	/*
	 * if (pmd_bad(*pmd))
	 *   pmd_clear(pmd)
	 *   goto pmd_bad:
	 */
//	l.sfeq	r3,r0			// check *pmd value
//	l.bf	d_pmd_good
	l.addi	r3,r0,0xffffe000	// PAGE_MASK
//	l.j	d_pmd_bad
//	l.sw	0x0(r4),r0		// clear pmd
d_pmd_good:
	/*
	 * pte = *pte_offset(pmd, daddr);
	 */
	l.lwz	r4,0x0(r4)		// get **pmd value
	l.and	r4,r4,r3		// & PAGE_MASK
	l.srli	r5,r2,0xd		// >> PAGE_SHIFT, r2 == EEAR
	l.andi	r3,r5,0x7ff		// (1UL << PAGE_SHIFT - 2) - 1
	l.slli	r3,r3,0x2		// to get address << 2
	l.add	r3,r3,r4
	l.lwz	r2,0x0(r3)		// this is pte at last
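	/* In C terms the lookup above is roughly:
	 *   pte = ((pte_t *)(pmd_val(*pmd) & PAGE_MASK))
	 *		[(daddr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1)];
	 * with PTRS_PER_PTE = 2048, which is where the 0x7ff mask comes from.
	 */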
	/*
	 * if (!pte_present(pte))
	 */
	l.andi	r4,r2,0x1
	l.sfne	r4,r0			// is pte present
	l.bnf	d_pte_not_present
	l.addi	r3,r0,0xffffe3fa	// PAGE_MASK | DTLB_UP_CONVERT_MASK
	/*
	 * fill DTLB TR register
	 */
	l.and	r4,r2,r3		// apply the mask
	// Determine number of DMMU sets
	l.mfspr	r6, r0, SPR_DMMUCFGR
	l.andi	r6, r6, SPR_DMMUCFGR_NTS
	l.srli	r6, r6, SPR_DMMUCFGR_NTS_OFF
	l.ori	r3, r0, 0x1
	l.sll	r3, r3, r6	// r3 = number DMMU sets DMMUCFGR
	l.addi	r6, r3, -1	// r6 = nsets mask
	l.and	r5, r5, r6	// calc offset: & (NUM_TLB_ENTRIES-1)
					//NUM_TLB_ENTRIES
	l.mtspr	r5,r4,SPR_DTLBTR_BASE(0)
	/*
	 * fill DTLB MR register
	 */
	l.mfspr	r2,r0,SPR_EEAR_BASE
	l.addi	r3,r0,0xffffe000	// PAGE_MASK
	l.and	r4,r2,r3		// apply PAGE_MASK to EA (__PHX__ do we really need this?)
	l.ori	r4,r4,0x1		// set hardware valid bit: DTLB_MR entry
	l.mtspr	r5,r4,SPR_DTLBMR_BASE(0)

	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR6
	l.rfe
d_pmd_bad:
	l.nop	1
	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR6
	l.rfe
d_pmd_none:
d_pte_not_present:
	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR6
	EXCEPTION_HANDLE(_dtlb_miss_page_fault_handler)

/* ==============================================[ ITLB miss handler ]=== */
ENTRY(itlb_miss_handler)
	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	EXCEPTION_STORE_GPR5
	EXCEPTION_STORE_GPR6
	/*
	 * get EA of the miss
	 */
	l.mfspr	r2,r0,SPR_EEAR_BASE

	/*
	 * pmd = (pmd_t *)(current_pgd + pgd_index(daddr));
	 *
	 */
	GET_CURRENT_PGD(r3,r5)		// r3 is current_pgd, r5 is temp
	l.srli	r4,r2,0x18		// >> PAGE_SHIFT + (PAGE_SHIFT - 2)
	l.slli	r4,r4,0x2		// to get address << 2
	l.add	r5,r4,r3		// r4 is pgd_index(daddr)
	/*
	 * if (pmd_none(*pmd))
	 *   goto pmd_none:
	 */
	tophys	(r4,r5)
	l.lwz	r3,0x0(r4)		// get *pmd value
	l.sfne	r3,r0
	l.bnf	i_pmd_none
	l.andi	r3,r3,0x1fff		// ~PAGE_MASK
	/*
	 * if (pmd_bad(*pmd))
	 *   pmd_clear(pmd)
	 *   goto pmd_bad:
	 */

//	l.sfeq	r3,r0			// check *pmd value
//	l.bf	i_pmd_good
	l.addi	r3,r0,0xffffe000	// PAGE_MASK
//	l.j	i_pmd_bad
//	l.sw	0x0(r4),r0		// clear pmd

i_pmd_good:
	/*
	 * pte = *pte_offset(pmd, iaddr);
	 *
	 */
	l.lwz	r4,0x0(r4)		// get **pmd value
	l.and	r4,r4,r3		// & PAGE_MASK
	l.srli	r5,r2,0xd		// >> PAGE_SHIFT, r2 == EEAR
	l.andi	r3,r5,0x7ff		// (1UL << PAGE_SHIFT - 2) - 1
	l.slli	r3,r3,0x2		// to get address << 2
	l.add	r3,r3,r4
	l.lwz	r2,0x0(r3)		// this is pte at last
	/*
	 * if (!pte_present(pte))
	 *
	 */
	l.andi	r4,r2,0x1
	l.sfne	r4,r0			// is pte present
	l.bnf	i_pte_not_present
	l.addi	r3,r0,0xffffe03a	// PAGE_MASK | ITLB_UP_CONVERT_MASK
	/*
	 * fill ITLB TR register
	 */
	l.and	r4,r2,r3		// apply the mask
	l.andi	r3,r2,0x7c0		// _PAGE_EXEC | _PAGE_SRE | _PAGE_SWE |  _PAGE_URE | _PAGE_UWE
//	l.andi	r3,r2,0x400		// _PAGE_EXEC
	l.sfeq	r3,r0
	l.bf	itlb_tr_fill //_workaround
	// Determine number of IMMU sets
	l.mfspr	r6, r0, SPR_IMMUCFGR
	l.andi	r6, r6, SPR_IMMUCFGR_NTS
	l.srli	r6, r6, SPR_IMMUCFGR_NTS_OFF
	l.ori	r3, r0, 0x1
	l.sll	r3, r3, r6	// r3 = number IMMU sets IMMUCFGR
	l.addi	r6, r3, -1	// r6 = nsets mask
	l.and	r5, r5, r6	// calc offset: & (NUM_TLB_ENTRIES-1)

	/*
	 * __PHX__ :: fixme
	 * we should not just blindly set executable flags,
	 * but it does help with ping. the clean way would be to find out
	 * (and fix it) why stack doesn't have execution permissions
	 */

itlb_tr_fill_workaround:
	l.ori	r4,r4,0xc0		// | (SPR_ITLBTR_UXE | ITLBTR_SXE)
itlb_tr_fill:
	l.mtspr	r5,r4,SPR_ITLBTR_BASE(0)
	/*
	 * fill ITLB MR register
	 */
	l.mfspr	r2,r0,SPR_EEAR_BASE
	l.addi	r3,r0,0xffffe000	// PAGE_MASK
	l.and	r4,r2,r3		// apply PAGE_MASK to EA (__PHX__ do we really need this?)
	l.ori	r4,r4,0x1		// set hardware valid bit: ITLB_MR entry
	l.mtspr	r5,r4,SPR_ITLBMR_BASE(0)

	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR6
	l.rfe

i_pmd_bad:
	l.nop	1
	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR6
	l.rfe
i_pmd_none:
i_pte_not_present:
	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR6
	EXCEPTION_HANDLE(_itlb_miss_page_fault_handler)

/* ==============================================[ boot tlb handlers ]=== */


/* =================================================[ debugging aids ]=== */

	.align 64
_immu_trampoline:
	.space 64
_immu_trampoline_top:

#define TRAMP_SLOT_0		(0x0)
#define TRAMP_SLOT_1		(0x4)
#define TRAMP_SLOT_2		(0x8)
#define TRAMP_SLOT_3		(0xc)
#define TRAMP_SLOT_4		(0x10)
#define TRAMP_SLOT_5		(0x14)
#define TRAMP_FRAME_SIZE	(0x18)

ENTRY(_immu_trampoline_workaround)
	// r2 EEA
	// r6 is physical EEA
	tophys(r6,r2)

	LOAD_SYMBOL_2_GPR(r5,_immu_trampoline)
	tophys	(r3,r5)			// r3 is trampoline (physical)

	LOAD_SYMBOL_2_GPR(r4,0x15000000)
	l.sw	TRAMP_SLOT_0(r3),r4
	l.sw	TRAMP_SLOT_1(r3),r4
	l.sw	TRAMP_SLOT_4(r3),r4
	l.sw	TRAMP_SLOT_5(r3),r4
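	/* Explanatory note (based on the standard OpenRISC l.nop encoding):
	 * 0x15000000 is l.nop, so the four stores above pre-fill the
	 * trampoline slots with nops before the real instructions are
	 * patched in below.
	 */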

					// EPC = EEA - 0x4
	l.lwz	r4,0x0(r6)		// load op @ EEA + 0x0 (fc address)
	l.sw	TRAMP_SLOT_3(r3),r4	// store it to _immu_trampoline_data
	l.lwz	r4,-0x4(r6)		// load op @ EEA - 0x4 (f8 address)
	l.sw	TRAMP_SLOT_2(r3),r4	// store it to _immu_trampoline_data

	l.srli  r5,r4,26                // check opcode for write access
	l.sfeqi r5,0                    // l.j
	l.bf    0f
	l.sfeqi r5,0x11                 // l.jr
	l.bf    1f
	l.sfeqi r5,1                    // l.jal
	l.bf    2f
	l.sfeqi r5,0x12                 // l.jalr
	l.bf    3f
	l.sfeqi r5,3                    // l.bnf
	l.bf    4f
	l.sfeqi r5,4                    // l.bf
	l.bf    5f
99:
	l.nop
	l.j	99b			// should never happen
	l.nop	1

	// r2 is EEA
	// r3 is trampoline address (physical)
	// r4 is instruction
	// r6 is physical(EEA)
	//
	// r5

2:	// l.jal

	/* 19 20 aa aa	l.movhi r9,0xaaaa
	 * a9 29 bb bb	l.ori	r9,0xbbbb
	 *
	 * where 0xaaaabbbb is EEA + 0x4 shifted right 2
	 */

	l.addi	r6,r2,0x4	// this is 0xaaaabbbb

	// l.movhi r9,0xaaaa
	l.ori	r5,r0,0x1920	// 0x1920 == l.movhi r9
	l.sh	(TRAMP_SLOT_0+0x0)(r3),r5
	l.srli	r5,r6,16
	l.sh	(TRAMP_SLOT_0+0x2)(r3),r5

	// l.ori r9,0xbbbb
	l.ori	r5,r0,0xa929	// 0xa929 == l.ori r9
	l.sh	(TRAMP_SLOT_1+0x0)(r3),r5
	l.andi	r5,r6,0xffff
	l.sh	(TRAMP_SLOT_1+0x2)(r3),r5

	/* fallthrough, need to set up new jump offset */


0:	// l.j
	l.slli	r6,r4,6		// original offset shifted left 6 - 2
//	l.srli	r6,r6,6		// original offset shifted right 2

	l.slli	r4,r2,4		// old jump position: EEA shifted left 4
//	l.srli	r4,r4,6		// old jump position: shifted right 2

	l.addi	r5,r3,0xc	// new jump position (physical)
	l.slli	r5,r5,4		// new jump position: shifted left 4

	// calculate new jump offset
	// new_off = old_off + (old_jump - new_jump)

	l.sub	r5,r4,r5	// old_jump - new_jump
	l.add	r5,r6,r5	// orig_off + (old_jump - new_jump)
	l.srli	r5,r5,6		// new offset shifted right 2

	// r5 is new jump offset
	// l.j has opcode 0x0...
	l.sw	TRAMP_SLOT_2(r3),r5	// write it back

	l.j	trampoline_out
	l.nop

/* ----------------------------- */

3:	// l.jalr

	/* 19 20 aa aa	l.movhi r9,0xaaaa
	 * a9 29 bb bb	l.ori	r9,0xbbbb
	 *
	 * where 0xaaaabbbb is EEA + 0x4 shifted right 2
	 */

	l.addi	r6,r2,0x4	// this is 0xaaaabbbb

	// l.movhi r9,0xaaaa
	l.ori	r5,r0,0x1920	// 0x1920 == l.movhi r9
	l.sh	(TRAMP_SLOT_0+0x0)(r3),r5
	l.srli	r5,r6,16
	l.sh	(TRAMP_SLOT_0+0x2)(r3),r5

	// l.ori r9,0xbbbb
	l.ori	r5,r0,0xa929	// 0xa929 == l.ori r9
	l.sh	(TRAMP_SLOT_1+0x0)(r3),r5
	l.andi	r5,r6,0xffff
	l.sh	(TRAMP_SLOT_1+0x2)(r3),r5

	l.lhz	r5,(TRAMP_SLOT_2+0x0)(r3)	// load hi part of jump instruction
	l.andi	r5,r5,0x3ff			// clear out opcode part
	l.ori	r5,r5,0x4400			// opcode changed from l.jalr -> l.jr
	l.sh	(TRAMP_SLOT_2+0x0)(r3),r5	// write it back

	/* fallthrough */

1:	// l.jr
	l.j	trampoline_out
	l.nop

/* ----------------------------- */

4:	// l.bnf
5:	// l.bf
	l.slli	r6,r4,6		// original offset shifted left 6 - 2
//	l.srli	r6,r6,6		// original offset shifted right 2

	l.slli	r4,r2,4		// old jump position: EEA shifted left 4
//	l.srli	r4,r4,6		// old jump position: shifted right 2

	l.addi	r5,r3,0xc	// new jump position (physical)
	l.slli	r5,r5,4		// new jump position: shifted left 4

	// calculate new jump offset
	// new_off = old_off + (old_jump - new_jump)

	l.add	r6,r6,r4	// (orig_off + old_jump)
	l.sub	r6,r6,r5	// (orig_off + old_jump) - new_jump
	l.srli	r6,r6,6		// new offset shifted right 2

	// r6 is new jump offset
	l.lwz	r4,(TRAMP_SLOT_2+0x0)(r3)	// load jump instruction
	l.srli	r4,r4,16
	l.andi	r4,r4,0xfc00	// get opcode part
	l.slli	r4,r4,16
	l.or	r6,r4,r6	// l.b(n)f new offset
	l.sw	TRAMP_SLOT_2(r3),r6	// write it back

	/* we need to add l.j to EEA + 0x8 */
	tophys	(r4,r2)			// may not be needed (due to shifts down)
	l.addi	r4,r4,(0x8 - 0x8)	// jump target = r2 + 0x8 (compensate for 0x8)
					// jump position = r5 + 0x8 (0x8 compensated)
	l.sub	r4,r4,r5		// jump offset = target - new_position + 0x8

	l.slli	r4,r4,4			// the amount of info in immediate of jump
	l.srli	r4,r4,6			// jump instruction with offset
	l.sw	TRAMP_SLOT_4(r3),r4	// write it to 4th slot

	/* fallthrough */

trampoline_out:
	// set up new EPC to point to our trampoline code
	LOAD_SYMBOL_2_GPR(r5,_immu_trampoline)
	l.mtspr	r0,r5,SPR_EPCR_BASE

	// immu_trampoline is (4x) CACHE_LINE aligned
	// and only 6 instructions long,
	// so we need to invalidate only 2 lines

	/* Establish cache block size
	   If BS=0, 16;
	   If BS=1, 32;
	   r14 contains the block size
	*/
	l.mfspr r21,r0,SPR_ICCFGR
	l.andi	r21,r21,SPR_ICCFGR_CBS
	l.srli	r21,r21,7
	l.ori	r23,r0,16
	l.sll	r14,r23,r21

	l.mtspr	r0,r5,SPR_ICBIR
	l.add	r5,r5,r14
	l.mtspr	r0,r5,SPR_ICBIR

	l.jr	r9
	l.nop


/*
 * DSCR: prints a string referenced by r3.
 *
 * PRMS: r3		- address of the first character of null
 *			terminated string to be printed
 *
 * PREQ: UART at UART_BASE_ADD has to be initialized
 *
 * POST: caller should be aware that r3, r9 are changed
 */
ENTRY(_emergency_print)
	EMERGENCY_PRINT_STORE_GPR4
	EMERGENCY_PRINT_STORE_GPR5
	EMERGENCY_PRINT_STORE_GPR6
	EMERGENCY_PRINT_STORE_GPR7
2:
	l.lbz	r7,0(r3)
	l.sfeq	r7,r0
	l.bf	9f
	l.nop

// putc:
	l.movhi r4,hi(UART_BASE_ADD)

	l.addi  r6,r0,0x20
1:      l.lbz   r5,5(r4)
	l.andi  r5,r5,0x20
	l.sfeq  r5,r6
	l.bnf   1b
	l.nop
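	/* Explanatory note: offset 5 from UART_BASE_ADD is the 16550 line
	 * status register; the loop above spins until THRE (0x20, transmit
	 * holding register empty) is set, and the loop below waits for
	 * 0x60 = THRE | TEMT, i.e. the transmitter being completely idle.
	 */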

	l.sb	0(r4),r7

	l.addi	r6,r0,0x60
1:      l.lbz   r5,5(r4)
	l.andi  r5,r5,0x60
	l.sfeq  r5,r6
	l.bnf   1b
	l.nop

	/* next character */
	l.j	2b
	l.addi	r3,r3,0x1

9:
	EMERGENCY_PRINT_LOAD_GPR7
	EMERGENCY_PRINT_LOAD_GPR6
	EMERGENCY_PRINT_LOAD_GPR5
	EMERGENCY_PRINT_LOAD_GPR4
	l.jr	r9
	l.nop

ENTRY(_emergency_print_nr)
	EMERGENCY_PRINT_STORE_GPR4
	EMERGENCY_PRINT_STORE_GPR5
	EMERGENCY_PRINT_STORE_GPR6
	EMERGENCY_PRINT_STORE_GPR7
	EMERGENCY_PRINT_STORE_GPR8

	l.addi	r8,r0,32		// shift register

1:	/* remove leading zeros */
	l.addi	r8,r8,-0x4
	l.srl	r7,r3,r8
	l.andi	r7,r7,0xf

	/* don't skip the last zero if number == 0x0 */
	l.sfeqi	r8,0x4
	l.bf	2f
	l.nop

	l.sfeq	r7,r0
	l.bf	1b
	l.nop

2:
	l.srl	r7,r3,r8

	l.andi	r7,r7,0xf
	l.sflts	r8,r0
	l.bf	9f

	l.sfgtui r7,0x9
	l.bnf	8f
	l.nop
	l.addi	r7,r7,0x27
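	/* Worked example of the digit-to-ASCII conversion: digits above 9
	 * get 0x27 here and 0x30 below, i.e. +0x57 in total, which maps
	 * 0xa..0xf to 'a'..'f' (0xa + 0x57 = 0x61 = 'a'); digits 0..9 just
	 * get +0x30 and become '0'..'9'.
	 */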

8:
	l.addi	r7,r7,0x30
// putc:
	l.movhi r4,hi(UART_BASE_ADD)

	l.addi  r6,r0,0x20
1:      l.lbz   r5,5(r4)
	l.andi  r5,r5,0x20
	l.sfeq  r5,r6
	l.bnf   1b
	l.nop

	l.sb	0(r4),r7

	l.addi	r6,r0,0x60
1:      l.lbz   r5,5(r4)
	l.andi  r5,r5,0x60
	l.sfeq  r5,r6
	l.bnf   1b
	l.nop

	/* next character */
	l.j	2b
	l.addi	r8,r8,-0x4

9:
	EMERGENCY_PRINT_LOAD_GPR8
	EMERGENCY_PRINT_LOAD_GPR7
	EMERGENCY_PRINT_LOAD_GPR6
	EMERGENCY_PRINT_LOAD_GPR5
	EMERGENCY_PRINT_LOAD_GPR4
	l.jr	r9
	l.nop


/*
 * This should be used for debugging only.
 * It messes up the Linux early serial output
 * somehow, so use it sparingly and essentially
 * only if you need to debug something that goes wrong
 * before Linux gets the early serial going.
 *
 * Furthermore, you'll have to make sure you set the
 * UART_DIVISOR correctly according to the system
 * clock rate.
 *
 *
 */



#define SYS_CLK            20000000
//#define SYS_CLK            1843200
#define OR32_CONSOLE_BAUD  115200
#define UART_DIVISOR       SYS_CLK/(16*OR32_CONSOLE_BAUD)
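
/*
 * Worked example with the values above: 16 * 115200 = 1843200, so
 * UART_DIVISOR = 20000000 / 1843200 = 10 after integer truncation;
 * the low byte goes into UART_DLL and the high byte into UART_DLM below.
 */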

ENTRY(_early_uart_init)
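	/* Descriptive sketch of the sequence below, assuming a standard
	 * 16550-compatible UART (register names as in linux/serial_reg.h):
	 * write 0x7 to the FIFO control register (enable and clear FIFOs),
	 * 0x0 to the interrupt enable register, 0x3 to the line control
	 * register (8N1), then set the DLAB bit, program the divisor via
	 * UART_DLM/UART_DLL and restore the original line control value.
	 */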
	l.movhi	r3,hi(UART_BASE_ADD)

	l.addi	r4,r0,0x7
	l.sb	0x2(r3),r4

	l.addi	r4,r0,0x0
	l.sb	0x1(r3),r4

	l.addi	r4,r0,0x3
	l.sb	0x3(r3),r4

	l.lbz	r5,3(r3)
	l.ori	r4,r5,0x80
	l.sb	0x3(r3),r4
	l.addi	r4,r0,((UART_DIVISOR>>8) & 0x000000ff)
	l.sb	UART_DLM(r3),r4
	l.addi	r4,r0,((UART_DIVISOR) & 0x000000ff)
	l.sb	UART_DLL(r3),r4
	l.sb	0x3(r3),r5

	l.jr	r9
	l.nop

_string_copying_linux:
	.string "\n\n\n\n\n\rCopying Linux... \0"

_string_ok_booting:
	.string "Ok, booting the kernel.\n\r\0"

_string_unhandled_exception:
	.string "\n\rRunarunaround: Unhandled exception 0x\0"

_string_epc_prefix:
	.string ": EPC=0x\0"

_string_nl:
	.string "\n\r\0"

	.global	_string_esr_irq_bug
_string_esr_irq_bug:
	.string "\n\rESR external interrupt bug, for details look into entry.S\n\r\0"


/* ========================================[ page aligned structures ]=== */

/*
 * .data section should be page aligned
 *	(look into arch/openrisc/kernel/vmlinux.lds.S)
 */
	.section .data,"aw"
	.align	8192
	.global	empty_zero_page
empty_zero_page:
	.space	8192

	.global	swapper_pg_dir
swapper_pg_dir:
	.space	8192

	.global	_unhandled_stack
_unhandled_stack:
	.space	8192
_unhandled_stack_top:

/* ============================================================[ EOF ]=== */