blob: 0a051dfeb177321ff4bc807c2b67fd3f3a87e089 [file] [log] [blame]
Thomas Gleixner2874c5f2019-05-27 08:55:01 +02001// SPDX-License-Identifier: GPL-2.0-or-later
Michael Ellermanaaddd3e2008-06-24 11:32:21 +10002/*
3 * Copyright 2008 Michael Ellerman, IBM Corporation.
Michael Ellermanaaddd3e2008-06-24 11:32:21 +10004 */
5
6#include <linux/kernel.h>
Naveen N. Rao71f6e582017-04-12 16:48:51 +05307#include <linux/kprobes.h>
Michael Ellermanae0dc732008-06-24 11:32:32 +10008#include <linux/vmalloc.h>
9#include <linux/init.h>
Andrea Righi27ac7922008-07-23 21:28:13 -070010#include <linux/mm.h>
Balbir Singh37bc3e52017-06-29 03:04:05 +100011#include <linux/cpuhotplug.h>
12#include <linux/slab.h>
Linus Torvalds7c0f6ba2016-12-24 11:46:01 -080013#include <linux/uaccess.h>
Michael Ellermanaaddd3e2008-06-24 11:32:21 +100014
Balbir Singh37bc3e52017-06-29 03:04:05 +100015#include <asm/tlbflush.h>
16#include <asm/page.h>
17#include <asm/code-patching.h>
Christophe Leroy252eb552017-11-21 15:28:20 +010018#include <asm/setup.h>
Jordan Niethe75346252020-05-06 13:40:26 +100019#include <asm/inst.h>
Michael Ellermanaaddd3e2008-06-24 11:32:21 +100020
/*
 * Write @instr to @patch_addr and make it visible to instruction fetch at
 * @exec_addr.  @patch_addr may be a temporary writable alias of @exec_addr
 * (see do_patch_instruction()); when patching in place the two are equal.
 *
 * Returns 0 on success, or the non-zero error from the failed store.
 */
static int __patch_instruction(struct ppc_inst *exec_addr, struct ppc_inst instr,
			       struct ppc_inst *patch_addr)
{
	int err = 0;

	/* Prefixed instructions are stored as a single 8-byte doubleword,
	 * plain instructions as a 4-byte word. */
	if (!ppc_inst_prefixed(instr)) {
		__put_user_asm(ppc_inst_val(instr), patch_addr, err, "stw");
	} else {
		__put_user_asm(ppc_inst_as_u64(instr), patch_addr, err, "std");
	}

	if (err)
		return err;

	/*
	 * Flush the data cache line at the write address and invalidate the
	 * icache line at the execute address, with the required syncs, so
	 * the CPU fetches the newly written instruction.
	 */
	asm ("dcbst 0, %0; sync; icbi 0,%1; sync; isync" :: "r" (patch_addr),
							    "r" (exec_addr));

	return 0;
}
40
/*
 * Patch an instruction directly in place: the address written is also the
 * address the code executes from (no temporary mapping is used).
 */
int raw_patch_instruction(struct ppc_inst *addr, struct ppc_inst instr)
{
	return __patch_instruction(addr, instr, addr);
}
45
Balbir Singh37bc3e52017-06-29 03:04:05 +100046#ifdef CONFIG_STRICT_KERNEL_RWX
47static DEFINE_PER_CPU(struct vm_struct *, text_poke_area);
48
49static int text_area_cpu_up(unsigned int cpu)
50{
51 struct vm_struct *area;
52
53 area = get_vm_area(PAGE_SIZE, VM_ALLOC);
54 if (!area) {
55 WARN_ONCE(1, "Failed to create text area for cpu %d\n",
56 cpu);
57 return -1;
58 }
59 this_cpu_write(text_poke_area, area);
60
61 return 0;
62}
63
/* CPU hotplug "offline" callback: release this CPU's patching area. */
static int text_area_cpu_down(unsigned int cpu)
{
	free_vm_area(this_cpu_read(text_poke_area));
	return 0;
}
69
70/*
71 * Run as a late init call. This allows all the boot time patching to be done
72 * simply by patching the code, and then we're called here prior to
73 * mark_rodata_ro(), which happens after all init calls are run. Although
74 * BUG_ON() is rude, in this case it should only happen if ENOMEM, and we judge
75 * it as being preferable to a kernel that will crash later when someone tries
76 * to use patch_instruction().
77 */
78static int __init setup_text_poke_area(void)
79{
80 BUG_ON(!cpuhp_setup_state(CPUHP_AP_ONLINE_DYN,
81 "powerpc/text_poke:online", text_area_cpu_up,
82 text_area_cpu_down));
83
84 return 0;
85}
86late_initcall(setup_text_poke_area);
87
/*
 * This can be called for kernel text or a module.
 *
 * Map the physical page backing @addr at the per-cpu scratch address
 * @text_poke_addr, giving a writable alias even when the original
 * mapping is read-only.  Returns 0 on success, -1 on failure.
 */
static int map_patch_area(void *addr, unsigned long text_poke_addr)
{
	unsigned long pfn;
	int err;

	/* Module text lives in vmalloc space, kernel text in the linear map */
	if (is_vmalloc_addr(addr))
		pfn = vmalloc_to_pfn(addr);
	else
		pfn = __pa_symbol(addr) >> PAGE_SHIFT;

	err = map_kernel_page(text_poke_addr, (pfn << PAGE_SHIFT), PAGE_KERNEL);

	pr_devel("Mapped addr %lx with pfn %lx:%d\n", text_poke_addr, pfn, err);
	if (err)
		return -1;

	return 0;
}
109
/*
 * Tear down the temporary mapping created by map_patch_area(): walk the
 * kernel page tables down to the PTE for @addr, clear it and flush the
 * TLB.  Returns 0 on success, -EINVAL if any level of the walk is missing.
 */
static inline int unmap_patch_area(unsigned long addr)
{
	pte_t *ptep;
	pmd_t *pmdp;
	pud_t *pudp;
	p4d_t *p4dp;
	pgd_t *pgdp;

	pgdp = pgd_offset_k(addr);
	if (unlikely(!pgdp))
		return -EINVAL;

	p4dp = p4d_offset(pgdp, addr);
	if (unlikely(!p4dp))
		return -EINVAL;

	pudp = pud_offset(p4dp, addr);
	if (unlikely(!pudp))
		return -EINVAL;

	pmdp = pmd_offset(pudp, addr);
	if (unlikely(!pmdp))
		return -EINVAL;

	ptep = pte_offset_kernel(pmdp, addr);
	if (unlikely(!ptep))
		return -EINVAL;

	pr_devel("clearing mm %p, pte %p, addr %lx\n", &init_mm, ptep, addr);

	/*
	 * In hash, pte_clear flushes the tlb; in radix it does not, so
	 * flush the range explicitly — harmless if it was already done.
	 */
	pte_clear(&init_mm, addr, ptep);
	flush_tlb_kernel_range(addr, addr + PAGE_SIZE);

	return 0;
}
148
Jordan Niethe94afd062020-05-06 13:40:31 +1000149static int do_patch_instruction(struct ppc_inst *addr, struct ppc_inst instr)
Balbir Singh37bc3e52017-06-29 03:04:05 +1000150{
151 int err;
Jordan Niethe94afd062020-05-06 13:40:31 +1000152 struct ppc_inst *patch_addr = NULL;
Balbir Singh37bc3e52017-06-29 03:04:05 +1000153 unsigned long flags;
154 unsigned long text_poke_addr;
155 unsigned long kaddr = (unsigned long)addr;
156
157 /*
158 * During early early boot patch_instruction is called
159 * when text_poke_area is not ready, but we still need
160 * to allow patching. We just do the plain old patching
Balbir Singh37bc3e52017-06-29 03:04:05 +1000161 */
Christophe Leroy8183d992017-11-24 08:31:09 +0100162 if (!this_cpu_read(text_poke_area))
Christophe Leroy8cf4c052017-11-24 08:31:07 +0100163 return raw_patch_instruction(addr, instr);
Balbir Singh37bc3e52017-06-29 03:04:05 +1000164
165 local_irq_save(flags);
166
167 text_poke_addr = (unsigned long)__this_cpu_read(text_poke_area)->addr;
168 if (map_patch_area(addr, text_poke_addr)) {
169 err = -1;
170 goto out;
171 }
172
Jordan Niethe94afd062020-05-06 13:40:31 +1000173 patch_addr = (struct ppc_inst *)(text_poke_addr + (kaddr & ~PAGE_MASK));
Balbir Singh37bc3e52017-06-29 03:04:05 +1000174
Christophe Leroy8cf4c052017-11-24 08:31:07 +0100175 __patch_instruction(addr, instr, patch_addr);
Balbir Singh37bc3e52017-06-29 03:04:05 +1000176
177 err = unmap_patch_area(text_poke_addr);
178 if (err)
179 pr_warn("failed to unmap %lx\n", text_poke_addr);
180
181out:
182 local_irq_restore(flags);
183
184 return err;
185}
186#else /* !CONFIG_STRICT_KERNEL_RWX */
187
/* Without STRICT_KERNEL_RWX kernel text is writable: patch it in place. */
static int do_patch_instruction(struct ppc_inst *addr, struct ppc_inst instr)
{
	return raw_patch_instruction(addr, instr);
}
192
193#endif /* CONFIG_STRICT_KERNEL_RWX */
Christophe Leroyb45ba4a2018-10-01 12:21:10 +0000194
/*
 * Main entry point for runtime code patching of kernel or module text.
 * Returns 0 on success (including the deliberate no-op on freed init
 * text), non-zero on failure.
 */
int patch_instruction(struct ppc_inst *addr, struct ppc_inst instr)
{
	/* Make sure we aren't patching a freed init section */
	/* NOTE(review): the containment check uses 4 bytes; a prefixed
	 * instruction is stored as 8 bytes (see __patch_instruction) and
	 * could straddle the init-section boundary — confirm intended. */
	if (init_mem_is_free && init_section_contains(addr, 4)) {
		pr_debug("Skipping init section patching addr: 0x%px\n", addr);
		return 0;
	}
	return do_patch_instruction(addr, instr);
}
NOKPROBE_SYMBOL(patch_instruction);
205
Jordan Niethe94afd062020-05-06 13:40:31 +1000206int patch_branch(struct ppc_inst *addr, unsigned long target, int flags)
Michael Ellermane7a572732008-06-24 11:32:22 +1000207{
Jordan Niethe94afd062020-05-06 13:40:31 +1000208 struct ppc_inst instr;
Jordan Niethe7c95d882020-05-06 13:40:25 +1000209
210 create_branch(&instr, addr, target, flags);
211 return patch_instruction(addr, instr);
Michael Ellermane7a572732008-06-24 11:32:22 +1000212}
213
/*
 * Can @offset be encoded in the LI field of an I-form branch?
 *
 * The instruction looks like:
 *
 *	 0	   6		      30   31
 *	 +---------+----------------+---+---+
 *	 | opcode  |       LI	    |AA |LK |
 *	 +---------+----------------+---+---+
 *	 with AA = 0 and LK = 0.
 *
 * LI is a signed 24 bit field, shifted left by two:
 * imm32 = SignExtend(LI:'0b00', 32), so the reachable byte offsets run
 * from -0x2000000 up to 0x1fffffc in steps of 4.
 */
bool is_offset_in_branch_range(long offset)
{
	/* Branch targets are word aligned */
	if (offset & 0x3)
		return false;

	return offset >= -0x2000000 && offset <= 0x1fffffc;
}
235
Anju T51c9c082017-02-08 15:20:51 +0530236/*
237 * Helper to check if a given instruction is a conditional branch
238 * Derived from the conditional checks in analyse_instr()
239 */
Jordan Niethe94afd062020-05-06 13:40:31 +1000240bool is_conditional_branch(struct ppc_inst instr)
Anju T51c9c082017-02-08 15:20:51 +0530241{
Jordan Niethe80948922020-05-06 13:40:28 +1000242 unsigned int opcode = ppc_inst_primary_opcode(instr);
Anju T51c9c082017-02-08 15:20:51 +0530243
244 if (opcode == 16) /* bc, bca, bcl, bcla */
245 return true;
246 if (opcode == 19) {
Jordan Niethe777e26f2020-05-06 13:40:27 +1000247 switch ((ppc_inst_val(instr) >> 1) & 0x3ff) {
Anju T51c9c082017-02-08 15:20:51 +0530248 case 16: /* bclr, bclrl */
249 case 528: /* bcctr, bcctrl */
250 case 560: /* bctar, bctarl */
251 return true;
252 }
253 }
254 return false;
255}
Naveen N. Rao71f6e582017-04-12 16:48:51 +0530256NOKPROBE_SYMBOL(is_conditional_branch);
Anju T51c9c082017-02-08 15:20:51 +0530257
Jordan Niethe94afd062020-05-06 13:40:31 +1000258int create_branch(struct ppc_inst *instr,
259 const struct ppc_inst *addr,
Jordan Niethe7c95d882020-05-06 13:40:25 +1000260 unsigned long target, int flags)
Michael Ellermanaaddd3e2008-06-24 11:32:21 +1000261{
Michael Ellerman053a8582008-06-24 11:32:24 +1000262 long offset;
Michael Ellermanaaddd3e2008-06-24 11:32:21 +1000263
Jordan Niethe94afd062020-05-06 13:40:31 +1000264 *instr = ppc_inst(0);
Michael Ellerman053a8582008-06-24 11:32:24 +1000265 offset = target;
Michael Ellermanaaddd3e2008-06-24 11:32:21 +1000266 if (! (flags & BRANCH_ABSOLUTE))
Michael Ellerman053a8582008-06-24 11:32:24 +1000267 offset = offset - (unsigned long)addr;
268
269 /* Check we can represent the target in the instruction format */
Anju Tebfa50d2017-02-08 14:27:30 +0530270 if (!is_offset_in_branch_range(offset))
Jordan Niethe7c95d882020-05-06 13:40:25 +1000271 return 1;
Michael Ellermanaaddd3e2008-06-24 11:32:21 +1000272
273 /* Mask out the flags and target, so they don't step on each other. */
Jordan Niethe94afd062020-05-06 13:40:31 +1000274 *instr = ppc_inst(0x48000000 | (flags & 0x3) | (offset & 0x03FFFFFC));
Michael Ellermanaaddd3e2008-06-24 11:32:21 +1000275
Jordan Niethe7c95d882020-05-06 13:40:25 +1000276 return 0;
Michael Ellermanaaddd3e2008-06-24 11:32:21 +1000277}
Michael Ellerman411781a2008-06-24 11:32:29 +1000278
Jordan Niethe94afd062020-05-06 13:40:31 +1000279int create_cond_branch(struct ppc_inst *instr, const struct ppc_inst *addr,
Jordan Niethe7c95d882020-05-06 13:40:25 +1000280 unsigned long target, int flags)
Michael Ellerman411781a2008-06-24 11:32:29 +1000281{
Michael Ellerman411781a2008-06-24 11:32:29 +1000282 long offset;
283
284 offset = target;
285 if (! (flags & BRANCH_ABSOLUTE))
286 offset = offset - (unsigned long)addr;
287
288 /* Check we can represent the target in the instruction format */
289 if (offset < -0x8000 || offset > 0x7FFF || offset & 0x3)
Jordan Niethe7c95d882020-05-06 13:40:25 +1000290 return 1;
Michael Ellerman411781a2008-06-24 11:32:29 +1000291
292 /* Mask out the flags and target, so they don't step on each other. */
Jordan Niethe94afd062020-05-06 13:40:31 +1000293 *instr = ppc_inst(0x40000000 | (flags & 0x3FF0003) | (offset & 0xFFFC));
Michael Ellerman411781a2008-06-24 11:32:29 +1000294
Jordan Niethe7c95d882020-05-06 13:40:25 +1000295 return 0;
Michael Ellerman411781a2008-06-24 11:32:29 +1000296}
297
/* Extract the 6 bit primary opcode of @instr. */
static unsigned int branch_opcode(struct ppc_inst instr)
{
	return ppc_inst_primary_opcode(instr) & 0x3F;
}
302
/* True if @instr is an I-form branch: b/ba/bl/bla (primary opcode 18). */
static int instr_is_branch_iform(struct ppc_inst instr)
{
	return branch_opcode(instr) == 18;
}
307
/* True if @instr is a B-form conditional branch (primary opcode 16). */
static int instr_is_branch_bform(struct ppc_inst instr)
{
	return branch_opcode(instr) == 16;
}
312
/* True if @instr is a relative (AA bit clear) I-form or B-form branch. */
int instr_is_relative_branch(struct ppc_inst instr)
{
	if (ppc_inst_val(instr) & BRANCH_ABSOLUTE)
		return 0;

	return instr_is_branch_iform(instr) || instr_is_branch_bform(instr);
}
320
/* True if @instr is a relative branch with the link (LK) bit set. */
int instr_is_relative_link_branch(struct ppc_inst instr)
{
	return instr_is_relative_branch(instr) && (ppc_inst_val(instr) & BRANCH_SET_LINK);
}
325
Jordan Niethe94afd062020-05-06 13:40:31 +1000326static unsigned long branch_iform_target(const struct ppc_inst *instr)
Michael Ellerman411781a2008-06-24 11:32:29 +1000327{
328 signed long imm;
329
Jordan Niethe777e26f2020-05-06 13:40:27 +1000330 imm = ppc_inst_val(*instr) & 0x3FFFFFC;
Michael Ellerman411781a2008-06-24 11:32:29 +1000331
332 /* If the top bit of the immediate value is set this is negative */
333 if (imm & 0x2000000)
334 imm -= 0x4000000;
335
Jordan Niethe777e26f2020-05-06 13:40:27 +1000336 if ((ppc_inst_val(*instr) & BRANCH_ABSOLUTE) == 0)
Michael Ellerman411781a2008-06-24 11:32:29 +1000337 imm += (unsigned long)instr;
338
339 return (unsigned long)imm;
340}
341
Jordan Niethe94afd062020-05-06 13:40:31 +1000342static unsigned long branch_bform_target(const struct ppc_inst *instr)
Michael Ellerman411781a2008-06-24 11:32:29 +1000343{
344 signed long imm;
345
Jordan Niethe777e26f2020-05-06 13:40:27 +1000346 imm = ppc_inst_val(*instr) & 0xFFFC;
Michael Ellerman411781a2008-06-24 11:32:29 +1000347
348 /* If the top bit of the immediate value is set this is negative */
349 if (imm & 0x8000)
350 imm -= 0x10000;
351
Jordan Niethe777e26f2020-05-06 13:40:27 +1000352 if ((ppc_inst_val(*instr) & BRANCH_ABSOLUTE) == 0)
Michael Ellerman411781a2008-06-24 11:32:29 +1000353 imm += (unsigned long)instr;
354
355 return (unsigned long)imm;
356}
357
Jordan Niethe94afd062020-05-06 13:40:31 +1000358unsigned long branch_target(const struct ppc_inst *instr)
Michael Ellerman411781a2008-06-24 11:32:29 +1000359{
Jordan Niethef8faaff2020-05-06 13:40:32 +1000360 if (instr_is_branch_iform(ppc_inst_read(instr)))
Michael Ellerman411781a2008-06-24 11:32:29 +1000361 return branch_iform_target(instr);
Jordan Niethef8faaff2020-05-06 13:40:32 +1000362 else if (instr_is_branch_bform(ppc_inst_read(instr)))
Michael Ellerman411781a2008-06-24 11:32:29 +1000363 return branch_bform_target(instr);
364
365 return 0;
366}
367
Jordan Niethe94afd062020-05-06 13:40:31 +1000368int instr_is_branch_to_addr(const struct ppc_inst *instr, unsigned long addr)
Michael Ellerman411781a2008-06-24 11:32:29 +1000369{
Jordan Niethef8faaff2020-05-06 13:40:32 +1000370 if (instr_is_branch_iform(ppc_inst_read(instr)) ||
371 instr_is_branch_bform(ppc_inst_read(instr)))
Michael Ellerman411781a2008-06-24 11:32:29 +1000372 return branch_target(instr) == addr;
373
374 return 0;
375}
376
/*
 * Re-encode the branch at @src so that it still reaches the same target
 * when placed at @dest, writing the result to *@instr.  The original
 * flag/condition bits are preserved by passing the raw instruction value
 * as flags.  Returns 0 on success, non-zero if @src is not a branch or
 * the target is unreachable from @dest.
 */
int translate_branch(struct ppc_inst *instr, const struct ppc_inst *dest,
		     const struct ppc_inst *src)
{
	unsigned long target;
	target = branch_target(src);

	if (instr_is_branch_iform(ppc_inst_read(src)))
		return create_branch(instr, dest, target,
				     ppc_inst_val(ppc_inst_read(src)));
	else if (instr_is_branch_bform(ppc_inst_read(src)))
		return create_cond_branch(instr, dest, target,
					  ppc_inst_val(ppc_inst_read(src)));

	return 1;
}
Michael Ellermanae0dc732008-06-24 11:32:32 +1000392
Kevin Hao1e8341a2013-05-12 07:26:22 +0800393#ifdef CONFIG_PPC_BOOK3E_64
/* Redirect Book3E exception vector @exc to the handler at @addr. */
void __patch_exception(int exc, unsigned long addr)
{
	extern unsigned int interrupt_base_book3e;
	unsigned int *ibase = &interrupt_base_book3e;

	/* Our exceptions vectors start with a NOP and -then- a branch
	 * to deal with single stepping from userspace which stops on
	 * the second instruction. Thus we need to patch the second
	 * instruction of the exception, not the first one
	 */

	patch_branch((struct ppc_inst *)(ibase + (exc / 4) + 1), addr, 0);
}
407#endif
Michael Ellermanae0dc732008-06-24 11:32:32 +1000408
409#ifdef CONFIG_CODE_PATCHING_SELFTEST
410
/* Dummy patch target used by test_create_function_call(). */
static void __init test_trampoline(void)
{
	asm ("nop;\n");
}
415
/*
 * Report a self-test failure with the offending source line.  Wrapped in
 * do { } while (0) so check() expands to a single statement and is safe
 * inside unbraced if/else bodies (the bare "if" form had a dangling-else
 * hazard).
 */
#define check(x)							\
	do {								\
		if (!(x))						\
			printk("code-patching: test failed at line %d\n", \
			       __LINE__);				\
	} while (0)
418
/*
 * Self-tests for the I-form (unconditional) branch helpers: recognition,
 * target decoding, and create_branch() encoding/range checks.
 */
static void __init test_branch_iform(void)
{
	int err;
	struct ppc_inst instr;
	unsigned long addr;

	/* Relative branches are tested against the address of instr itself */
	addr = (unsigned long)&instr;

	/* The simplest case, branch to self, no flags */
	check(instr_is_branch_iform(ppc_inst(0x48000000)));
	/* All bits of target set, and flags */
	check(instr_is_branch_iform(ppc_inst(0x4bffffff)));
	/* High bit of opcode set, which is wrong */
	check(!instr_is_branch_iform(ppc_inst(0xcbffffff)));
	/* Middle bits of opcode set, which is wrong */
	check(!instr_is_branch_iform(ppc_inst(0x7bffffff)));

	/* Simplest case, branch to self with link */
	check(instr_is_branch_iform(ppc_inst(0x48000001)));
	/* All bits of targets set */
	check(instr_is_branch_iform(ppc_inst(0x4bfffffd)));
	/* Some bits of targets set */
	check(instr_is_branch_iform(ppc_inst(0x4bff00fd)));
	/* Must be a valid branch to start with */
	check(!instr_is_branch_iform(ppc_inst(0x7bfffffd)));

	/* Absolute branch to 0x100 */
	instr = ppc_inst(0x48000103);
	check(instr_is_branch_to_addr(&instr, 0x100));
	/* Absolute branch to 0x420fc */
	instr = ppc_inst(0x480420ff);
	check(instr_is_branch_to_addr(&instr, 0x420fc));
	/* Maximum positive relative branch, + 20MB - 4B */
	instr = ppc_inst(0x49fffffc);
	check(instr_is_branch_to_addr(&instr, addr + 0x1FFFFFC));
	/* Smallest negative relative branch, - 4B */
	instr = ppc_inst(0x4bfffffc);
	check(instr_is_branch_to_addr(&instr, addr - 4));
	/* Largest negative relative branch, - 32 MB */
	instr = ppc_inst(0x4a000000);
	check(instr_is_branch_to_addr(&instr, addr - 0x2000000));

	/* Branch to self, with link */
	err = create_branch(&instr, &instr, addr, BRANCH_SET_LINK);
	check(instr_is_branch_to_addr(&instr, addr));

	/* Branch to self - 0x100, with link */
	err = create_branch(&instr, &instr, addr - 0x100, BRANCH_SET_LINK);
	check(instr_is_branch_to_addr(&instr, addr - 0x100));

	/* Branch to self + 0x100, no link */
	err = create_branch(&instr, &instr, addr + 0x100, 0);
	check(instr_is_branch_to_addr(&instr, addr + 0x100));

	/* Maximum relative negative offset, - 32 MB */
	err = create_branch(&instr, &instr, addr - 0x2000000, BRANCH_SET_LINK);
	check(instr_is_branch_to_addr(&instr, addr - 0x2000000));

	/* Out of range relative negative offset, - 32 MB + 4*/
	err = create_branch(&instr, &instr, addr - 0x2000004, BRANCH_SET_LINK);
	check(err);

	/* Out of range relative positive offset, + 32 MB */
	err = create_branch(&instr, &instr, addr + 0x2000000, BRANCH_SET_LINK);
	check(err);

	/* Unaligned target */
	err = create_branch(&instr, &instr, addr + 3, BRANCH_SET_LINK);
	check(err);

	/* Check flags are masked correctly */
	err = create_branch(&instr, &instr, addr, 0xFFFFFFFC);
	check(instr_is_branch_to_addr(&instr, addr));
	check(ppc_inst_equal(instr, ppc_inst(0x48000000)));
}
494
/*
 * Check we can create and patch a "bl" over the first instruction of a
 * real function (test_trampoline) and that it targets what we asked for.
 */
static void __init test_create_function_call(void)
{
	struct ppc_inst *iptr;
	unsigned long dest;
	struct ppc_inst instr;

	/* Check we can create a function call */
	iptr = (struct ppc_inst *)ppc_function_entry(test_trampoline);
	dest = ppc_function_entry(test_create_function_call);
	create_branch(&instr, iptr, dest, BRANCH_SET_LINK);
	patch_instruction(iptr, instr);
	check(instr_is_branch_to_addr(iptr, dest));
}
508
/*
 * Self-tests for the B-form (conditional) branch helpers: recognition,
 * target decoding, and create_cond_branch() encoding/range checks.
 */
static void __init test_branch_bform(void)
{
	int err;
	unsigned long addr;
	struct ppc_inst *iptr, instr;
	unsigned int flags;

	/* Relative branches are tested against the address of instr itself */
	iptr = &instr;
	addr = (unsigned long)iptr;

	/* The simplest case, branch to self, no flags */
	check(instr_is_branch_bform(ppc_inst(0x40000000)));
	/* All bits of target set, and flags */
	check(instr_is_branch_bform(ppc_inst(0x43ffffff)));
	/* High bit of opcode set, which is wrong */
	check(!instr_is_branch_bform(ppc_inst(0xc3ffffff)));
	/* Middle bits of opcode set, which is wrong */
	check(!instr_is_branch_bform(ppc_inst(0x7bffffff)));

	/* Absolute conditional branch to 0x100 */
	instr = ppc_inst(0x43ff0103);
	check(instr_is_branch_to_addr(&instr, 0x100));
	/* Absolute conditional branch to 0x20fc */
	instr = ppc_inst(0x43ff20ff);
	check(instr_is_branch_to_addr(&instr, 0x20fc));
	/* Maximum positive relative conditional branch, + 32 KB - 4B */
	instr = ppc_inst(0x43ff7ffc);
	check(instr_is_branch_to_addr(&instr, addr + 0x7FFC));
	/* Smallest negative relative conditional branch, - 4B */
	instr = ppc_inst(0x43fffffc);
	check(instr_is_branch_to_addr(&instr, addr - 4));
	/* Largest negative relative conditional branch, - 32 KB */
	instr = ppc_inst(0x43ff8000);
	check(instr_is_branch_to_addr(&instr, addr - 0x8000));

	/* All condition code bits set & link */
	flags = 0x3ff000 | BRANCH_SET_LINK;

	/* Branch to self */
	err = create_cond_branch(&instr, iptr, addr, flags);
	check(instr_is_branch_to_addr(&instr, addr));

	/* Branch to self - 0x100 */
	err = create_cond_branch(&instr, iptr, addr - 0x100, flags);
	check(instr_is_branch_to_addr(&instr, addr - 0x100));

	/* Branch to self + 0x100 */
	err = create_cond_branch(&instr, iptr, addr + 0x100, flags);
	check(instr_is_branch_to_addr(&instr, addr + 0x100));

	/* Maximum relative negative offset, - 32 KB */
	err = create_cond_branch(&instr, iptr, addr - 0x8000, flags);
	check(instr_is_branch_to_addr(&instr, addr - 0x8000));

	/* Out of range relative negative offset, - 32 KB + 4*/
	err = create_cond_branch(&instr, iptr, addr - 0x8004, flags);
	check(err);

	/* Out of range relative positive offset, + 32 KB */
	err = create_cond_branch(&instr, iptr, addr + 0x8000, flags);
	check(err);

	/* Unaligned target */
	err = create_cond_branch(&instr, iptr, addr + 3, flags);
	check(err);

	/* Check flags are masked correctly */
	err = create_cond_branch(&instr, iptr, addr, 0xFFFFFFFC);
	check(instr_is_branch_to_addr(&instr, addr));
	check(ppc_inst_equal(instr, ppc_inst(0x43FF0000)));
}
580
/*
 * Self-tests for translate_branch(): patch a branch at p, translate it to
 * a new location q, and check both copies still target the same address.
 * Covers unconditional and conditional branches at the extremes of their
 * reachable ranges.
 */
static void __init test_translate_branch(void)
{
	unsigned long addr;
	void *p, *q;
	struct ppc_inst instr;
	void *buf;

	/* Big enough to exercise the full +/-32 MB I-form range */
	buf = vmalloc(PAGE_ALIGN(0x2000000 + 1));
	check(buf);
	if (!buf)
		return;

	/* Simple case, branch to self moved a little */
	p = buf;
	addr = (unsigned long)p;
	patch_branch(p, addr, 0);
	check(instr_is_branch_to_addr(p, addr));
	q = p + 4;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(q, addr));

	/* Maximum negative case, move b . to addr + 32 MB */
	p = buf;
	addr = (unsigned long)p;
	patch_branch(p, addr, 0);
	q = buf + 0x2000000;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));
	check(ppc_inst_equal(ppc_inst_read(q), ppc_inst(0x4a000000)));

	/* Maximum positive case, move x to x - 32 MB + 4 */
	p = buf + 0x2000000;
	addr = (unsigned long)p;
	patch_branch(p, addr, 0);
	q = buf + 4;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));
	check(ppc_inst_equal(ppc_inst_read(q), ppc_inst(0x49fffffc)));

	/* Jump to x + 16 MB moved to x + 20 MB */
	p = buf;
	addr = 0x1000000 + (unsigned long)buf;
	patch_branch(p, addr, BRANCH_SET_LINK);
	q = buf + 0x1400000;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));

	/* Jump to x + 16 MB moved to x - 16 MB + 4 */
	p = buf + 0x1000000;
	addr = 0x2000000 + (unsigned long)buf;
	patch_branch(p, addr, 0);
	q = buf + 4;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));


	/* Conditional branch tests */

	/* Simple case, branch to self moved a little */
	p = buf;
	addr = (unsigned long)p;
	create_cond_branch(&instr, p, addr, 0);
	patch_instruction(p, instr);
	check(instr_is_branch_to_addr(p, addr));
	q = buf + 4;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(q, addr));

	/* Maximum negative case, move b . to addr + 32 KB */
	p = buf;
	addr = (unsigned long)p;
	create_cond_branch(&instr, p, addr, 0xFFFFFFFC);
	patch_instruction(p, instr);
	q = buf + 0x8000;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));
	check(ppc_inst_equal(ppc_inst_read(q), ppc_inst(0x43ff8000)));

	/* Maximum positive case, move x to x - 32 KB + 4 */
	p = buf + 0x8000;
	addr = (unsigned long)p;
	create_cond_branch(&instr, p, addr, 0xFFFFFFFC);
	patch_instruction(p, instr);
	q = buf + 4;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));
	check(ppc_inst_equal(ppc_inst_read(q), ppc_inst(0x43ff7ffc)));

	/* Jump to x + 12 KB moved to x + 20 KB */
	p = buf;
	addr = 0x3000 + (unsigned long)buf;
	create_cond_branch(&instr, p, addr, BRANCH_SET_LINK);
	patch_instruction(p, instr);
	q = buf + 0x5000;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));

	/* Jump to x + 8 KB moved to x - 8 KB + 4 */
	p = buf + 0x2000;
	addr = 0x4000 + (unsigned long)buf;
	create_cond_branch(&instr, p, addr, 0);
	patch_instruction(p, instr);
	q = buf + 4;
	translate_branch(&instr, q, p);
	patch_instruction(q, instr);
	check(instr_is_branch_to_addr(p, addr));
	check(instr_is_branch_to_addr(q, addr));

	/* Free the buffer we were using */
	vfree(buf);
}
708
#ifdef CONFIG_PPC64
/*
 * Patch a prefixed (8 byte) instruction over the start of the asm-defined
 * test block and compare the result against the expected bytes.  The
 * symbols are defined in the selftest asm alongside this file.
 */
static void __init test_prefixed_patching(void)
{
	extern unsigned int code_patching_test1[];
	extern unsigned int code_patching_test1_expected[];
	extern unsigned int end_code_patching_test1[];

	__patch_instruction((struct ppc_inst *)code_patching_test1,
			    ppc_inst_prefix(OP_PREFIX << 26, 0x00000000),
			    (struct ppc_inst *)code_patching_test1);

	check(!memcmp(code_patching_test1,
		      code_patching_test1_expected,
		      sizeof(unsigned int) *
		      (end_code_patching_test1 - code_patching_test1)));
}
#else
/* Prefixed instructions are 64-bit only; nothing to test elsewhere. */
static inline void test_prefixed_patching(void) {}
#endif
728
/* Entry point: run all the code patching self-tests at late init. */
static int __init test_code_patching(void)
{
	printk(KERN_DEBUG "Running code patching self-tests ...\n");

	test_branch_iform();
	test_branch_bform();
	test_create_function_call();
	test_translate_branch();
	test_prefixed_patching();

	return 0;
}
late_initcall(test_code_patching);
742
743#endif /* CONFIG_CODE_PATCHING_SELFTEST */