/* align.c - handle alignment exceptions for the Power PC.
 *
 * Copyright (c) 1996 Paul Mackerras <paulus@cs.anu.edu.au>
 * Copyright (c) 1998-1999 TiVo, Inc.
 *   PowerPC 403GCX modifications.
 * Copyright (c) 1999 Grant Erickson <grant@lcse.umn.edu>
 *   PowerPC 403GCX/405GP modifications.
 * Copyright (c) 2001-2002 PPC64 team, IBM Corp
 *   64-bit and Power4 support
 * Copyright (c) 2005 Benjamin Herrenschmidt, IBM Corp
 *                    <benh@kernel.crashing.org>
 *   Merge ppc32 and ppc64 implementations
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#include <linux/kernel.h>
#include <linux/mm.h>
#include <asm/processor.h>
#include <linux/uaccess.h>
#include <asm/cache.h>
#include <asm/cputable.h>
#include <asm/emulated_ops.h>
#include <asm/switch_to.h>
#include <asm/disassemble.h>
#include <asm/cpu_has_feature.h>
#include <asm/sstep.h>

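/*
 * Describes one emulated access: len is the access size in bytes,
 * flags holds the LD/ST/SE/SW/E4/E8 bits defined below.
 */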
struct aligninfo {
	unsigned char len;
	unsigned char flags;
};


#define INVALID	{ 0, 0 }

/* Bits in the flags field */
#define LD	0	/* load */
#define ST	1	/* store */
#define SE	2	/* sign-extend value, or FP ld/st as word */
#define SW	0x20	/* byte swap */
#define E4	0x40	/* SPE endianness is word */
#define E8	0x80	/* SPE endianness is double word */

#ifdef CONFIG_SPE

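/*
 * One entry per SPE load/store variant, indexed by bits 1-5 of the
 * extended opcode (see emulate_spe() below); the bit pattern in each
 * comment is that index.
 */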
static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },		/* 0 00 00: evldd[x] */
	{ 8, LD+E4 },		/* 0 00 01: evldw[x] */
	{ 8, LD },		/* 0 00 10: evldh[x] */
	INVALID,		/* 0 00 11 */
	{ 2, LD },		/* 0 01 00: evlhhesplat[x] */
	INVALID,		/* 0 01 01 */
	{ 2, LD },		/* 0 01 10: evlhhousplat[x] */
	{ 2, LD+SE },		/* 0 01 11: evlhhossplat[x] */
	{ 4, LD },		/* 0 10 00: evlwhe[x] */
	INVALID,		/* 0 10 01 */
	{ 4, LD },		/* 0 10 10: evlwhou[x] */
	{ 4, LD+SE },		/* 0 10 11: evlwhos[x] */
	{ 4, LD+E4 },		/* 0 11 00: evlwwsplat[x] */
	INVALID,		/* 0 11 01 */
	{ 4, LD },		/* 0 11 10: evlwhsplat[x] */
	INVALID,		/* 0 11 11 */

	{ 8, ST+E8 },		/* 1 00 00: evstdd[x] */
	{ 8, ST+E4 },		/* 1 00 01: evstdw[x] */
	{ 8, ST },		/* 1 00 10: evstdh[x] */
	INVALID,		/* 1 00 11 */
	INVALID,		/* 1 01 00 */
	INVALID,		/* 1 01 01 */
	INVALID,		/* 1 01 10 */
	INVALID,		/* 1 01 11 */
	{ 4, ST },		/* 1 10 00: evstwhe[x] */
	INVALID,		/* 1 10 01 */
	{ 4, ST },		/* 1 10 10: evstwho[x] */
	INVALID,		/* 1 10 11 */
	{ 4, ST+E4 },		/* 1 11 00: evstwwe[x] */
	INVALID,		/* 1 11 01 */
	{ 4, ST+E4 },		/* 1 11 10: evstwwo[x] */
	INVALID,		/* 1 11 11 */
};

#define	EVLDD		0x00
#define	EVLDW		0x01
#define	EVLDH		0x02
#define	EVLHHESPLAT	0x04
#define	EVLHHOUSPLAT	0x06
#define	EVLHHOSSPLAT	0x07
#define	EVLWHE		0x08
#define	EVLWHOU		0x0A
#define	EVLWHOS		0x0B
#define	EVLWWSPLAT	0x0C
#define	EVLWHSPLAT	0x0E
#define	EVSTDD		0x10
#define	EVSTDW		0x11
#define	EVSTDH		0x12
#define	EVSTWHE		0x18
#define	EVSTWHO		0x1A
#define	EVSTWWE		0x1C
#define	EVSTWWO		0x1E

/*
 * Emulate SPE loads and stores.
 * Only Book-E has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_spe(struct pt_regs *regs, unsigned int reg,
		       unsigned int instr)
{
	int ret;
	union {
		u64 ll;
		u32 w[2];
		u16 h[4];
		u8 v[8];
	} data, temp;
	unsigned char __user *p, *addr;
	unsigned long *evr = &current->thread.evr[reg];
	unsigned int nb, flags;

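	/*
	 * Reduce the instruction to the spe_aligninfo[] index: the lowest
	 * bit of the extended opcode only selects the indexed ([x]) form,
	 * so it is dropped.  E.g. evldd (extended opcode 0x301) gives
	 * (0x301 >> 1) & 0x1f == 0x00 == EVLDD.
	 */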
	instr = (instr >> 1) & 0x1f;

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

	nb = spe_aligninfo[instr].len;
	flags = spe_aligninfo[instr].flags;

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* userland only */
	if (unlikely(!user_mode(regs)))
		return 0;

	flush_spe_to_thread(current);

	/* If we are loading, get the data from user space, else
	 * get it from register values
	 */
	if (flags & ST) {
		data.ll = 0;
		switch (instr) {
		case EVSTDD:
		case EVSTDW:
		case EVSTDH:
			data.w[0] = *evr;
			data.w[1] = regs->gpr[reg];
			break;
		case EVSTWHE:
			data.h[2] = *evr >> 16;
			data.h[3] = regs->gpr[reg] >> 16;
			break;
		case EVSTWHO:
			data.h[2] = *evr & 0xffff;
			data.h[3] = regs->gpr[reg] & 0xffff;
			break;
		case EVSTWWE:
			data.w[1] = *evr;
			break;
		case EVSTWWO:
			data.w[1] = regs->gpr[reg];
			break;
		default:
			return -EINVAL;
		}
	} else {
		temp.ll = data.ll = 0;
		ret = 0;
		p = addr;

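		/*
		 * Note the deliberate fall-through: each size reads its
		 * leading bytes and then falls into the smaller cases, so
		 * the bytes read always end up right-justified at temp.v[7].
		 */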
		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(temp.v[0], p++);
			ret |= __get_user_inatomic(temp.v[1], p++);
			ret |= __get_user_inatomic(temp.v[2], p++);
			ret |= __get_user_inatomic(temp.v[3], p++);
		case 4:
			ret |= __get_user_inatomic(temp.v[4], p++);
			ret |= __get_user_inatomic(temp.v[5], p++);
		case 2:
			ret |= __get_user_inatomic(temp.v[6], p++);
			ret |= __get_user_inatomic(temp.v[7], p++);
			if (unlikely(ret))
				return -EFAULT;
		}

		switch (instr) {
		case EVLDD:
		case EVLDW:
		case EVLDH:
			data.ll = temp.ll;
			break;
		case EVLHHESPLAT:
			data.h[0] = temp.h[3];
			data.h[2] = temp.h[3];
			break;
		case EVLHHOUSPLAT:
		case EVLHHOSSPLAT:
			data.h[1] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		case EVLWHE:
			data.h[0] = temp.h[2];
			data.h[2] = temp.h[3];
			break;
		case EVLWHOU:
		case EVLWHOS:
			data.h[1] = temp.h[2];
			data.h[3] = temp.h[3];
			break;
		case EVLWWSPLAT:
			data.w[0] = temp.w[1];
			data.w[1] = temp.w[1];
			break;
		case EVLWHSPLAT:
			data.h[0] = temp.h[2];
			data.h[1] = temp.h[2];
			data.h[2] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		default:
			return -EINVAL;
		}
	}

	if (flags & SW) {
		switch (flags & 0xf0) {
		case E8:
			data.ll = swab64(data.ll);
			break;
		case E4:
			data.w[0] = swab32(data.w[0]);
			data.w[1] = swab32(data.w[1]);
			break;
		/* It's half-word endian */
| 243 | default: |
Anton Blanchard | f626190 | 2013-09-23 12:04:46 +1000 | [diff] [blame] | 244 | data.h[0] = swab16(data.h[0]); |
| 245 | data.h[1] = swab16(data.h[1]); |
| 246 | data.h[2] = swab16(data.h[2]); |
| 247 | data.h[3] = swab16(data.h[3]); |
Kumar Gala | 26caeb2 | 2007-08-24 16:42:53 -0500 | [diff] [blame] | 248 | break; |
| 249 | } |
| 250 | } |
| 251 | |
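	/* Sign-extend the halfwords loaded by evlhhossplat and evlwhos. */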
	if (flags & SE) {
		data.w[0] = (s16)data.h[1];
		data.w[1] = (s16)data.h[3];
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		ret = 0;
		p = addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], p++);
			ret |= __put_user_inatomic(data.v[1], p++);
			ret |= __put_user_inatomic(data.v[2], p++);
			ret |= __put_user_inatomic(data.v[3], p++);
		case 4:
			ret |= __put_user_inatomic(data.v[4], p++);
			ret |= __put_user_inatomic(data.v[5], p++);
		case 2:
			ret |= __put_user_inatomic(data.v[6], p++);
			ret |= __put_user_inatomic(data.v[7], p++);
		}
		if (unlikely(ret))
			return -EFAULT;
	} else {
		*evr = data.w[0];
		regs->gpr[reg] = data.w[1];
	}

	return 1;
}
#endif /* CONFIG_SPE */

/*
 * Called on alignment exception. Attempts to fix it up.
 *
 * Return 1 on success
 * Return 0 if unable to handle the interrupt
 * Return -EFAULT if data address is bad
 * Other negative return values indicate that the instruction can't
 * be emulated, and the process should be given a SIGBUS.
 */

int fix_alignment(struct pt_regs *regs)
{
	unsigned int instr;
	struct instruction_op op;
	int r, type;

	/*
	 * We require a complete register set; if not, then our assembly
	 * is broken
	 */
	CHECK_FULL_REGS(regs);

	if (unlikely(__get_user(instr, (unsigned int __user *)regs->nip)))
		return -EFAULT;
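	/*
	 * If the faulting context runs with the opposite endianness to the
	 * kernel, the instruction image read above must be byte-swapped
	 * before it can be decoded.
	 */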
	if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) {
		/* We don't handle PPC little-endian any more... */
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			return -EIO;
		instr = swab32(instr);
	}

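	/*
	 * SPE loads/stores are not handled by the generic instruction
	 * analyser below; they go through the table-driven emulation above.
	 */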
#ifdef CONFIG_SPE
	if ((instr >> 26) == 0x4) {
		int reg = (instr >> 21) & 0x1f;
		PPC_WARN_ALIGNMENT(spe, regs);
		return emulate_spe(regs, reg, instr);
	}
#endif

	/*
	 * ISA 3.0 (such as P9) copy, copy_first, paste and paste_last alignment
	 * check.
	 *
	 * Send a SIGBUS to the process that caused the fault.
	 *
	 * We do not emulate these because paste may contain additional metadata
	 * when pasting to a co-processor. Furthermore, paste_last is the
	 * synchronisation point for preceding copy/paste sequences.
	 */
	if ((instr & 0xfc0006fe) == (PPC_INST_COPY & 0xfc0006fe))
		return -EIO;

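	/*
	 * Let the generic instruction decoder work out what the access is;
	 * op then describes the operation (type, size, effective address).
	 */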
	r = analyse_instr(&op, regs, instr);
	if (r < 0)
		return -EINVAL;

	type = GETTYPE(op.type);
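	/*
	 * The only non-load/store instruction emulated here is dcbz, which
	 * typically takes an alignment interrupt when it targets a
	 * cache-inhibited mapping.
	 */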
	if (!OP_IS_LOAD_STORE(type)) {
		if (op.type != CACHEOP + DCBZ)
			return -EINVAL;
		PPC_WARN_ALIGNMENT(dcbz, regs);
		r = emulate_dcbz(op.ea, regs);
	} else {
		if (type == LARX || type == STCX)
			return -EIO;
		PPC_WARN_ALIGNMENT(unaligned, regs);
		r = emulate_loadstore(regs, &op);
	}

	if (!r)
		return 1;
	return r;
}