/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * A small micro-assembler. It is intentionally kept simple, only supports
 * a subset of instructions, and does not try to hide pipeline effects
 * like branch delay slots.
 *
 * Copyright (C) 2004, 2005, 2006, 2008 Thiemo Seufer
 * Copyright (C) 2005, 2007 Maciej W. Rozycki
 * Copyright (C) 2006 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (C) 2012, 2013 MIPS Technologies, Inc. All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/types.h>

#include <asm/inst.h>
#include <asm/elf.h>
#include <asm/bugs.h>
#include <asm/uasm.h>

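/*
 * Field positions for the 32-bit microMIPS encoding. Note that rt and rs
 * occupy swapped bit positions compared with the classic MIPS layout used
 * by uasm-mips.c; these definitions feed the shared field builders in the
 * uasm.c core included below.
 */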
#define RS_MASK		0x1f
#define RS_SH		16
#define RT_MASK		0x1f
#define RT_SH		21
#define SCIMM_MASK	0x3ff
#define SCIMM_SH	16

/* This macro sets the non-variable bits of an instruction. */
#define M(a, b, c, d, e, f)		\
	((a) << OP_SH			\
	 | (b) << RT_SH			\
	 | (c) << RS_SH			\
	 | (d) << RD_SH			\
	 | (e) << RE_SH			\
	 | (f) << FUNC_SH)
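
/*
 * For example, the insn_addu entry below uses
 * M(mm_pool32a_op, 0, 0, 0, 0, mm_addu32_op): the major opcode and the
 * function code are fixed here, while the rt/rs/rd register fields are
 * filled in later by build_insn().
 */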

#include "uasm.c"

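/*
 * Encoding table consumed by the shared uasm core included above. Entries
 * left as {0, 0} (mainly 64-bit, branch-likely and vendor-specific
 * operations) have no microMIPS encoding here; build_insn() panics if one
 * of them is requested.
 */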
static const struct insn insn_table_MM[insn_invalid] = {
	[insn_addu]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_addu32_op), RT | RS | RD},
	[insn_addiu]	= {M(mm_addiu32_op, 0, 0, 0, 0, 0), RT | RS | SIMM},
	[insn_and]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_and_op), RT | RS | RD},
	[insn_andi]	= {M(mm_andi32_op, 0, 0, 0, 0, 0), RT | RS | UIMM},
	[insn_beq]	= {M(mm_beq32_op, 0, 0, 0, 0, 0), RS | RT | BIMM},
	[insn_beql]	= {0, 0},
	[insn_bgez]	= {M(mm_pool32i_op, mm_bgez_op, 0, 0, 0, 0), RS | BIMM},
	[insn_bgezl]	= {0, 0},
	[insn_bltz]	= {M(mm_pool32i_op, mm_bltz_op, 0, 0, 0, 0), RS | BIMM},
	[insn_bltzl]	= {0, 0},
	[insn_bne]	= {M(mm_bne32_op, 0, 0, 0, 0, 0), RT | RS | BIMM},
	[insn_cache]	= {M(mm_pool32b_op, 0, 0, mm_cache_func, 0, 0), RT | RS | SIMM},
	[insn_cfc1]	= {M(mm_pool32f_op, 0, 0, 0, mm_cfc1_op, mm_32f_73_op), RT | RS},
	[insn_cfcmsa]	= {M(mm_pool32s_op, 0, msa_cfc_op, 0, 0, mm_32s_elm_op), RD | RE},
	[insn_ctc1]	= {M(mm_pool32f_op, 0, 0, 0, mm_ctc1_op, mm_32f_73_op), RT | RS},
	[insn_ctcmsa]	= {M(mm_pool32s_op, 0, msa_ctc_op, 0, 0, mm_32s_elm_op), RD | RE},
	[insn_daddu]	= {0, 0},
	[insn_daddiu]	= {0, 0},
	[insn_di]	= {M(mm_pool32a_op, 0, 0, 0, mm_di_op, mm_pool32axf_op), RS},
	[insn_divu]	= {M(mm_pool32a_op, 0, 0, 0, mm_divu_op, mm_pool32axf_op), RT | RS},
	[insn_dmfc0]	= {0, 0},
	[insn_dmtc0]	= {0, 0},
	[insn_dsll]	= {0, 0},
	[insn_dsll32]	= {0, 0},
	[insn_dsra]	= {0, 0},
	[insn_dsrl]	= {0, 0},
	[insn_dsrl32]	= {0, 0},
	[insn_drotr]	= {0, 0},
	[insn_drotr32]	= {0, 0},
	[insn_dsubu]	= {0, 0},
	[insn_eret]	= {M(mm_pool32a_op, 0, 0, 0, mm_eret_op, mm_pool32axf_op), 0},
	[insn_ins]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_ins_op), RT | RS | RD | RE},
	[insn_ext]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_ext_op), RT | RS | RD | RE},
	[insn_j]	= {M(mm_j32_op, 0, 0, 0, 0, 0), JIMM},
	[insn_jal]	= {M(mm_jal32_op, 0, 0, 0, 0, 0), JIMM},
	[insn_jalr]	= {M(mm_pool32a_op, 0, 0, 0, mm_jalr_op, mm_pool32axf_op), RT | RS},
	[insn_jr]	= {M(mm_pool32a_op, 0, 0, 0, mm_jalr_op, mm_pool32axf_op), RS},
	[insn_lb]	= {M(mm_lb32_op, 0, 0, 0, 0, 0), RT | RS | SIMM},
	[insn_ld]	= {0, 0},
	[insn_lh]	= {M(mm_lh32_op, 0, 0, 0, 0, 0), RT | RS | SIMM},
	[insn_ll]	= {M(mm_pool32c_op, 0, 0, (mm_ll_func << 1), 0, 0), RS | RT | SIMM},
	[insn_lld]	= {0, 0},
	[insn_lui]	= {M(mm_pool32i_op, mm_lui_op, 0, 0, 0, 0), RS | SIMM},
	[insn_lw]	= {M(mm_lw32_op, 0, 0, 0, 0, 0), RT | RS | SIMM},
	[insn_mfc0]	= {M(mm_pool32a_op, 0, 0, 0, mm_mfc0_op, mm_pool32axf_op), RT | RS | RD},
	[insn_mfhi]	= {M(mm_pool32a_op, 0, 0, 0, mm_mfhi32_op, mm_pool32axf_op), RS},
	[insn_mflo]	= {M(mm_pool32a_op, 0, 0, 0, mm_mflo32_op, mm_pool32axf_op), RS},
	[insn_mtc0]	= {M(mm_pool32a_op, 0, 0, 0, mm_mtc0_op, mm_pool32axf_op), RT | RS | RD},
	[insn_mthi]	= {M(mm_pool32a_op, 0, 0, 0, mm_mthi32_op, mm_pool32axf_op), RS},
	[insn_mtlo]	= {M(mm_pool32a_op, 0, 0, 0, mm_mtlo32_op, mm_pool32axf_op), RS},
	[insn_mul]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_mul_op), RT | RS | RD},
	[insn_or]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_or32_op), RT | RS | RD},
	[insn_ori]	= {M(mm_ori32_op, 0, 0, 0, 0, 0), RT | RS | UIMM},
	[insn_pref]	= {M(mm_pool32c_op, 0, 0, (mm_pref_func << 1), 0, 0), RT | RS | SIMM},
	[insn_rfe]	= {0, 0},
	[insn_sc]	= {M(mm_pool32c_op, 0, 0, (mm_sc_func << 1), 0, 0), RT | RS | SIMM},
	[insn_scd]	= {0, 0},
	[insn_sd]	= {0, 0},
	[insn_sll]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_sll32_op), RT | RS | RD},
	[insn_sllv]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_sllv32_op), RT | RS | RD},
	[insn_slt]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_slt_op), RT | RS | RD},
	[insn_sltiu]	= {M(mm_sltiu32_op, 0, 0, 0, 0, 0), RT | RS | SIMM},
	[insn_sltu]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_sltu_op), RT | RS | RD},
	[insn_sra]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_sra_op), RT | RS | RD},
	[insn_srav]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_srav_op), RT | RS | RD},
	[insn_srl]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_srl32_op), RT | RS | RD},
	[insn_srlv]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_srlv32_op), RT | RS | RD},
	[insn_rotr]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_rotr_op), RT | RS | RD},
	[insn_subu]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_subu32_op), RT | RS | RD},
	[insn_sw]	= {M(mm_sw32_op, 0, 0, 0, 0, 0), RT | RS | SIMM},
	[insn_sync]	= {M(mm_pool32a_op, 0, 0, 0, mm_sync_op, mm_pool32axf_op), RS},
	[insn_tlbp]	= {M(mm_pool32a_op, 0, 0, 0, mm_tlbp_op, mm_pool32axf_op), 0},
	[insn_tlbr]	= {M(mm_pool32a_op, 0, 0, 0, mm_tlbr_op, mm_pool32axf_op), 0},
	[insn_tlbwi]	= {M(mm_pool32a_op, 0, 0, 0, mm_tlbwi_op, mm_pool32axf_op), 0},
	[insn_tlbwr]	= {M(mm_pool32a_op, 0, 0, 0, mm_tlbwr_op, mm_pool32axf_op), 0},
	[insn_wait]	= {M(mm_pool32a_op, 0, 0, 0, mm_wait_op, mm_pool32axf_op), SCIMM},
	[insn_wsbh]	= {M(mm_pool32a_op, 0, 0, 0, mm_wsbh_op, mm_pool32axf_op), RT | RS},
	[insn_xor]	= {M(mm_pool32a_op, 0, 0, 0, 0, mm_xor32_op), RT | RS | RD},
	[insn_xori]	= {M(mm_xori32_op, 0, 0, 0, 0, 0), RT | RS | UIMM},
	[insn_dins]	= {0, 0},
	[insn_dinsm]	= {0, 0},
	[insn_syscall]	= {M(mm_pool32a_op, 0, 0, 0, mm_syscall_op, mm_pool32axf_op), SCIMM},
	[insn_bbit0]	= {0, 0},
	[insn_bbit1]	= {0, 0},
	[insn_lwx]	= {0, 0},
	[insn_ldx]	= {0, 0},
};

#undef M

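/*
 * Branch immediates are pc-relative byte offsets in a 17-bit signed range
 * (-0x10000 .. 0xffff) with the low two bits clear; the encoded field is
 * the sign bit plus bits 15:1 of the offset, i.e. the offset in halfwords.
 */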
static inline u32 build_bimm(s32 arg)
{
	WARN(arg > 0xffff || arg < -0x10000,
	     KERN_WARNING "Micro-assembler field overflow\n");

	WARN(arg & 0x3, KERN_WARNING "Invalid micro-assembler branch target\n");

	return ((arg < 0) ? (1 << 15) : 0) | ((arg >> 1) & 0x7fff);
}

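/*
 * microMIPS jump targets are encoded in halfword units (arg >> 1), unlike
 * the word-indexed target field of the classic MIPS j/jal encoding.
 */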
static inline u32 build_jimm(u32 arg)
{
	WARN(arg & ~((JIMM_MASK << 2) | 1),
	     KERN_WARNING "Micro-assembler field overflow\n");

	return (arg >> 1) & JIMM_MASK;
}

/*
 * The order of opcode arguments is implicitly left to right,
 * starting with RS and ending with FUNC or IMM.
 */
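/*
 * For instance, a three-register operation such as insn_addu is emitted as
 * build_insn(buf, insn_addu, rs, rt, rd), the variable arguments being
 * consumed in exactly that order (an illustrative call; callers normally
 * go through the uasm_i_* helpers generated in uasm.c).
 */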
static void build_insn(u32 **buf, enum opcode opc, ...)
{
	const struct insn *ip;
	va_list ap;
	u32 op;

	if (opc < 0 || opc >= insn_invalid ||
	    (opc == insn_daddiu && r4k_daddiu_bug()) ||
	    (insn_table_MM[opc].match == 0 && insn_table_MM[opc].fields == 0))
		panic("Unsupported Micro-assembler instruction %d", opc);

	ip = &insn_table_MM[opc];

	op = ip->match;
	va_start(ap, opc);
	if (ip->fields & RS) {
		if (opc == insn_mfc0 || opc == insn_mtc0 ||
		    opc == insn_cfc1 || opc == insn_ctc1)
			op |= build_rt(va_arg(ap, u32));
		else
			op |= build_rs(va_arg(ap, u32));
	}
	if (ip->fields & RT) {
		if (opc == insn_mfc0 || opc == insn_mtc0 ||
		    opc == insn_cfc1 || opc == insn_ctc1)
			op |= build_rs(va_arg(ap, u32));
		else
			op |= build_rt(va_arg(ap, u32));
	}
	if (ip->fields & RD)
		op |= build_rd(va_arg(ap, u32));
	if (ip->fields & RE)
		op |= build_re(va_arg(ap, u32));
	if (ip->fields & SIMM)
		op |= build_simm(va_arg(ap, s32));
	if (ip->fields & UIMM)
		op |= build_uimm(va_arg(ap, u32));
	if (ip->fields & BIMM)
		op |= build_bimm(va_arg(ap, s32));
	if (ip->fields & JIMM)
		op |= build_jimm(va_arg(ap, u32));
	if (ip->fields & FUNC)
		op |= build_func(va_arg(ap, u32));
	if (ip->fields & SET)
		op |= build_set(va_arg(ap, u32));
	if (ip->fields & SCIMM)
		op |= build_scimm(va_arg(ap, u32));
	va_end(ap);

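	/*
	 * 32-bit microMIPS instructions are stored as two 16-bit halfwords
	 * with the major-opcode halfword first in instruction-stream order,
	 * so on a little-endian kernel the assembled word is stored with
	 * its halfwords swapped.
	 */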
#ifdef CONFIG_CPU_LITTLE_ENDIAN
	**buf = ((op & 0xffff) << 16) | (op >> 16);
#else
	**buf = op;
#endif
	(*buf)++;
}

static inline void
__resolve_relocs(struct uasm_reloc *rel, struct uasm_label *lab)
{
	long laddr = (long)lab->addr;
	long raddr = (long)rel->addr;

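	/*
	 * Only R_MIPS_PC16 is handled: the branch displacement is taken
	 * relative to raddr + 4. On little endian the immediate is OR-ed
	 * into the upper halfword of the stored word, matching the halfword
	 * swap performed in build_insn() above.
	 */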
	switch (rel->type) {
	case R_MIPS_PC16:
#ifdef CONFIG_CPU_LITTLE_ENDIAN
		*rel->addr |= (build_bimm(laddr - (raddr + 4)) << 16);
#else
		*rel->addr |= build_bimm(laddr - (raddr + 4));
#endif
		break;

	default:
		panic("Unsupported Micro-assembler relocation %d",
		      rel->type);
	}
}