// SPDX-License-Identifier: GPL-2.0-or-later
/* align.c - handle alignment exceptions for the Power PC.
 *
 * Copyright (c) 1996 Paul Mackerras <paulus@cs.anu.edu.au>
 * Copyright (c) 1998-1999 TiVo, Inc.
 *    PowerPC 403GCX modifications.
 * Copyright (c) 1999 Grant Erickson <grant@lcse.umn.edu>
 *    PowerPC 403GCX/405GP modifications.
 * Copyright (c) 2001-2002 PPC64 team, IBM Corp
 *    64-bit and Power4 support
 * Copyright (c) 2005 Benjamin Herrenschmidt, IBM Corp
 *                    <benh@kernel.crashing.org>
 *    Merge ppc32 and ppc64 implementations
 */

#include <linux/kernel.h>
#include <linux/mm.h>
#include <asm/processor.h>
#include <linux/uaccess.h>
#include <asm/cache.h>
#include <asm/cputable.h>
#include <asm/emulated_ops.h>
#include <asm/switch_to.h>
#include <asm/disassemble.h>
#include <asm/cpu_has_feature.h>
#include <asm/sstep.h>
#include <asm/inst.h>

struct aligninfo {
	unsigned char len;
	unsigned char flags;
};


#define INVALID	{ 0, 0 }

/* Bits in the flags field */
#define LD	0	/* load */
#define ST	1	/* store */
#define SE	2	/* sign-extend value, or FP ld/st as word */
#define SW	0x20	/* byte swap */
#define E4	0x40	/* SPE endianness is word */
#define E8	0x80	/* SPE endianness is double word */

#ifdef CONFIG_SPE

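/* Size and flags for each SPE load/store, indexed by bits 1-5 of the
 * instruction word (see emulate_spe() below)
 */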
static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },		/* 0 00 00: evldd[x] */
	{ 8, LD+E4 },		/* 0 00 01: evldw[x] */
	{ 8, LD },		/* 0 00 10: evldh[x] */
	INVALID,		/* 0 00 11 */
	{ 2, LD },		/* 0 01 00: evlhhesplat[x] */
	INVALID,		/* 0 01 01 */
	{ 2, LD },		/* 0 01 10: evlhhousplat[x] */
	{ 2, LD+SE },		/* 0 01 11: evlhhossplat[x] */
	{ 4, LD },		/* 0 10 00: evlwhe[x] */
	INVALID,		/* 0 10 01 */
	{ 4, LD },		/* 0 10 10: evlwhou[x] */
	{ 4, LD+SE },		/* 0 10 11: evlwhos[x] */
	{ 4, LD+E4 },		/* 0 11 00: evlwwsplat[x] */
	INVALID,		/* 0 11 01 */
	{ 4, LD },		/* 0 11 10: evlwhsplat[x] */
	INVALID,		/* 0 11 11 */

	{ 8, ST+E8 },		/* 1 00 00: evstdd[x] */
	{ 8, ST+E4 },		/* 1 00 01: evstdw[x] */
	{ 8, ST },		/* 1 00 10: evstdh[x] */
	INVALID,		/* 1 00 11 */
	INVALID,		/* 1 01 00 */
	INVALID,		/* 1 01 01 */
	INVALID,		/* 1 01 10 */
	INVALID,		/* 1 01 11 */
	{ 4, ST },		/* 1 10 00: evstwhe[x] */
	INVALID,		/* 1 10 01 */
	{ 4, ST },		/* 1 10 10: evstwho[x] */
	INVALID,		/* 1 10 11 */
	{ 4, ST+E4 },		/* 1 11 00: evstwwe[x] */
	INVALID,		/* 1 11 01 */
	{ 4, ST+E4 },		/* 1 11 10: evstwwo[x] */
	INVALID,		/* 1 11 11 */
};

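/* (instr >> 1) & 0x1f values for the SPE instructions handled above */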
#define EVLDD		0x00
#define EVLDW		0x01
#define EVLDH		0x02
#define EVLHHESPLAT	0x04
#define EVLHHOUSPLAT	0x06
#define EVLHHOSSPLAT	0x07
#define EVLWHE		0x08
#define EVLWHOU		0x0A
#define EVLWHOS		0x0B
#define EVLWWSPLAT	0x0C
#define EVLWHSPLAT	0x0E
#define EVSTDD		0x10
#define EVSTDW		0x11
#define EVSTDH		0x12
#define EVSTWHE		0x18
#define EVSTWHO		0x1A
#define EVSTWWE		0x1C
#define EVSTWWO		0x1E

/*
 * Emulate SPE loads and stores.
 * Only Book-E has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_spe(struct pt_regs *regs, unsigned int reg,
		       struct ppc_inst ppc_instr)
{
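	/* View the 8-byte operand as one double word, two words, four
	 * half words or eight bytes, so the per-instruction merges and
	 * splats below are simple array assignments.
	 */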
	union {
		u64 ll;
		u32 w[2];
		u16 h[4];
		u8 v[8];
	} data, temp;
	unsigned char __user *p, *addr;
	unsigned long *evr = &current->thread.evr[reg];
	unsigned int nb, flags, instr;

	instr = ppc_inst_val(ppc_instr);
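	/* Bits 1-5 of the instruction word index spe_aligninfo[] */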
	instr = (instr >> 1) & 0x1f;

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

	nb = spe_aligninfo[instr].len;
	flags = spe_aligninfo[instr].flags;

	/* userland only */
	if (unlikely(!user_mode(regs)))
		return 0;

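	/* Make sure thread.evr[] holds the current SPE register state */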
	flush_spe_to_thread(current);

	/* If we are storing, gather the data from the register values,
	 * else (loading) get it from user space
	 */
	if (flags & ST) {
		data.ll = 0;
		switch (instr) {
		case EVSTDD:
		case EVSTDW:
		case EVSTDH:
			data.w[0] = *evr;
			data.w[1] = regs->gpr[reg];
			break;
		case EVSTWHE:
			data.h[2] = *evr >> 16;
			data.h[3] = regs->gpr[reg] >> 16;
			break;
		case EVSTWHO:
			data.h[2] = *evr & 0xffff;
			data.h[3] = regs->gpr[reg] & 0xffff;
			break;
		case EVSTWWE:
			data.w[1] = *evr;
			break;
		case EVSTWWO:
			data.w[1] = regs->gpr[reg];
			break;
		default:
			return -EINVAL;
		}
	} else {
		temp.ll = data.ll = 0;
		p = addr;

		if (!user_read_access_begin(addr, nb))
			return -EFAULT;

		switch (nb) {
		case 8:
			unsafe_get_user(temp.v[0], p++, Efault_read);
			unsafe_get_user(temp.v[1], p++, Efault_read);
			unsafe_get_user(temp.v[2], p++, Efault_read);
			unsafe_get_user(temp.v[3], p++, Efault_read);
			fallthrough;
		case 4:
			unsafe_get_user(temp.v[4], p++, Efault_read);
			unsafe_get_user(temp.v[5], p++, Efault_read);
			fallthrough;
		case 2:
			unsafe_get_user(temp.v[6], p++, Efault_read);
			unsafe_get_user(temp.v[7], p++, Efault_read);
		}
		user_read_access_end();

		switch (instr) {
		case EVLDD:
		case EVLDW:
		case EVLDH:
			data.ll = temp.ll;
			break;
		case EVLHHESPLAT:
			data.h[0] = temp.h[3];
			data.h[2] = temp.h[3];
			break;
		case EVLHHOUSPLAT:
		case EVLHHOSSPLAT:
			data.h[1] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		case EVLWHE:
			data.h[0] = temp.h[2];
			data.h[2] = temp.h[3];
			break;
		case EVLWHOU:
		case EVLWHOS:
			data.h[1] = temp.h[2];
			data.h[3] = temp.h[3];
			break;
		case EVLWWSPLAT:
			data.w[0] = temp.w[1];
			data.w[1] = temp.w[1];
			break;
		case EVLWHSPLAT:
			data.h[0] = temp.h[2];
			data.h[1] = temp.h[2];
			data.h[2] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		default:
			return -EINVAL;
		}
	}

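	/* When byte-swapping (SW), the E4/E8 flags give the swap width;
	 * the default is half words
	 */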
	if (flags & SW) {
		switch (flags & 0xf0) {
		case E8:
			data.ll = swab64(data.ll);
			break;
		case E4:
			data.w[0] = swab32(data.w[0]);
			data.w[1] = swab32(data.w[1]);
			break;
		/* It's half-word endian */
		default:
			data.h[0] = swab16(data.h[0]);
			data.h[1] = swab16(data.h[1]);
			data.h[2] = swab16(data.h[2]);
			data.h[3] = swab16(data.h[3]);
			break;
		}
	}

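	/* Sign-extend each loaded half word into its full word for the
	 * sign-extending forms (evlhhossplat, evlwhos)
	 */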
	if (flags & SE) {
		data.w[0] = (s16)data.h[1];
		data.w[1] = (s16)data.h[3];
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		p = addr;

		if (!user_write_access_begin(addr, nb))
			return -EFAULT;

		switch (nb) {
		case 8:
			unsafe_put_user(data.v[0], p++, Efault_write);
			unsafe_put_user(data.v[1], p++, Efault_write);
			unsafe_put_user(data.v[2], p++, Efault_write);
			unsafe_put_user(data.v[3], p++, Efault_write);
			fallthrough;
		case 4:
			unsafe_put_user(data.v[4], p++, Efault_write);
			unsafe_put_user(data.v[5], p++, Efault_write);
			fallthrough;
		case 2:
			unsafe_put_user(data.v[6], p++, Efault_write);
			unsafe_put_user(data.v[7], p++, Efault_write);
		}
		user_write_access_end();
	} else {
		*evr = data.w[0];
		regs->gpr[reg] = data.w[1];
	}

	return 1;

Efault_read:
	user_read_access_end();
	return -EFAULT;

Efault_write:
	user_write_access_end();
	return -EFAULT;
}
#endif /* CONFIG_SPE */

/*
 * Called on alignment exception. Attempts to fix up the offending access.
 *
 * Return 1 on success
 * Return 0 if unable to handle the interrupt
 * Return -EFAULT if data address is bad
 * Other negative return values indicate that the instruction can't
 * be emulated, and the process should be given a SIGBUS.
 */

int fix_alignment(struct pt_regs *regs)
{
	struct ppc_inst instr;
	struct instruction_op op;
	int r, type;

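	/* Fetch the faulting instruction: from kernel text if the fault
	 * happened in the kernel, otherwise from user memory
	 */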
	if (is_kernel_addr(regs->nip))
		r = copy_inst_from_kernel_nofault(&instr, (void *)regs->nip);
	else
		r = __get_user_instr(instr, (void __user *)regs->nip);

	if (unlikely(r))
		return -EFAULT;
	if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) {
		/* We don't handle PPC little-endian any more... */
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			return -EIO;
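		/* The image we fetched is in the opposite byte order to
		 * the one the CPU decoded, so swap it back
		 */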
		instr = ppc_inst_swab(instr);
	}

#ifdef CONFIG_SPE
	if (ppc_inst_primary_opcode(instr) == 0x4) {
		int reg = (ppc_inst_val(instr) >> 21) & 0x1f;

		PPC_WARN_ALIGNMENT(spe, regs);
		return emulate_spe(regs, reg, instr);
	}
#endif

	/*
	 * ISA 3.0 (such as P9) copy, copy_first, paste and paste_last alignment
	 * check.
	 *
	 * Send a SIGBUS to the process that caused the fault.
	 *
	 * We do not emulate these because paste may contain additional metadata
	 * when pasting to a co-processor. Furthermore, paste_last is the
	 * synchronisation point for preceding copy/paste sequences.
	 */
	if ((ppc_inst_val(instr) & 0xfc0006fe) == (PPC_INST_COPY & 0xfc0006fe))
		return -EIO;

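	/* Decode the instruction with the generic analyser; anything it
	 * cannot decode we cannot emulate either
	 */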
	r = analyse_instr(&op, regs, instr);
	if (r < 0)
		return -EINVAL;

	type = GETTYPE(op.type);
	if (!OP_IS_LOAD_STORE(type)) {
		if (op.type != CACHEOP + DCBZ)
			return -EINVAL;
		PPC_WARN_ALIGNMENT(dcbz, regs);
		WARN_ON_ONCE(!user_mode(regs));
		r = emulate_dcbz(op.ea, regs);
	} else {
		if (type == LARX || type == STCX)
			return -EIO;
		PPC_WARN_ALIGNMENT(unaligned, regs);
		r = emulate_loadstore(regs, &op);
	}

	if (!r)
		return 1;
	return r;
}