/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_
#define ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_

#include "invoke_type.h"
#include "compiled_method.h"
#include "dex/compiler_enums.h"
#include "dex/compiler_ir.h"
#include "dex/reg_storage.h"
#include "dex/backend.h"
#include "driver/compiler_driver.h"
#include "leb128.h"
#include "safe_map.h"
#include "utils/array_ref.h"
#include "utils/arena_allocator.h"
#include "utils/growable_array.h"

namespace art {

/*
 * TODO: refactoring pass to move these (and other) typedefs towards usage style of runtime to
 * add type safety (see runtime/offsets.h).
 */
typedef uint32_t DexOffset;          // Dex offset in code units.
typedef uint16_t NarrowDexOffset;    // For use in structs, Dex offsets range from 0 .. 0xffff.
typedef uint32_t CodeOffset;         // Native code offset in bytes.

// Set to 1 to measure cost of suspend check.
#define NO_SUSPEND 0

#define IS_BINARY_OP         (1ULL << kIsBinaryOp)
#define IS_BRANCH            (1ULL << kIsBranch)
#define IS_IT                (1ULL << kIsIT)
#define IS_LOAD              (1ULL << kMemLoad)
#define IS_QUAD_OP           (1ULL << kIsQuadOp)
#define IS_QUIN_OP           (1ULL << kIsQuinOp)
#define IS_SEXTUPLE_OP       (1ULL << kIsSextupleOp)
#define IS_STORE             (1ULL << kMemStore)
#define IS_TERTIARY_OP       (1ULL << kIsTertiaryOp)
#define IS_UNARY_OP          (1ULL << kIsUnaryOp)
#define NEEDS_FIXUP          (1ULL << kPCRelFixup)
#define NO_OPERAND           (1ULL << kNoOperand)
#define REG_DEF0             (1ULL << kRegDef0)
#define REG_DEF1             (1ULL << kRegDef1)
#define REG_DEF2             (1ULL << kRegDef2)
#define REG_DEFA             (1ULL << kRegDefA)
#define REG_DEFD             (1ULL << kRegDefD)
#define REG_DEF_FPCS_LIST0   (1ULL << kRegDefFPCSList0)
#define REG_DEF_FPCS_LIST2   (1ULL << kRegDefFPCSList2)
#define REG_DEF_LIST0        (1ULL << kRegDefList0)
#define REG_DEF_LIST1        (1ULL << kRegDefList1)
#define REG_DEF_LR           (1ULL << kRegDefLR)
#define REG_DEF_SP           (1ULL << kRegDefSP)
#define REG_USE0             (1ULL << kRegUse0)
#define REG_USE1             (1ULL << kRegUse1)
#define REG_USE2             (1ULL << kRegUse2)
#define REG_USE3             (1ULL << kRegUse3)
#define REG_USE4             (1ULL << kRegUse4)
#define REG_USEA             (1ULL << kRegUseA)
#define REG_USEC             (1ULL << kRegUseC)
#define REG_USED             (1ULL << kRegUseD)
#define REG_USEB             (1ULL << kRegUseB)
#define REG_USE_FPCS_LIST0   (1ULL << kRegUseFPCSList0)
#define REG_USE_FPCS_LIST2   (1ULL << kRegUseFPCSList2)
#define REG_USE_LIST0        (1ULL << kRegUseList0)
#define REG_USE_LIST1        (1ULL << kRegUseList1)
#define REG_USE_LR           (1ULL << kRegUseLR)
#define REG_USE_PC           (1ULL << kRegUsePC)
#define REG_USE_SP           (1ULL << kRegUseSP)
#define SETS_CCODES          (1ULL << kSetsCCodes)
#define USES_CCODES          (1ULL << kUsesCCodes)
#define USE_FP_STACK         (1ULL << kUseFpStack)
#define REG_USE_LO           (1ULL << kUseLo)
#define REG_USE_HI           (1ULL << kUseHi)
#define REG_DEF_LO           (1ULL << kDefLo)
#define REG_DEF_HI           (1ULL << kDefHi)

// Common combo register usage patterns.
#define REG_DEF01            (REG_DEF0 | REG_DEF1)
#define REG_DEF012           (REG_DEF0 | REG_DEF1 | REG_DEF2)
#define REG_DEF01_USE2       (REG_DEF0 | REG_DEF1 | REG_USE2)
#define REG_DEF0_USE01       (REG_DEF0 | REG_USE01)
#define REG_DEF0_USE0        (REG_DEF0 | REG_USE0)
#define REG_DEF0_USE12       (REG_DEF0 | REG_USE12)
#define REG_DEF0_USE123      (REG_DEF0 | REG_USE123)
#define REG_DEF0_USE1        (REG_DEF0 | REG_USE1)
#define REG_DEF0_USE2        (REG_DEF0 | REG_USE2)
#define REG_DEFAD_USEAD      (REG_DEFAD_USEA | REG_USED)
#define REG_DEFAD_USEA       (REG_DEFA_USEA | REG_DEFD)
#define REG_DEFA_USEA        (REG_DEFA | REG_USEA)
#define REG_USE012           (REG_USE01 | REG_USE2)
#define REG_USE014           (REG_USE01 | REG_USE4)
#define REG_USE01            (REG_USE0 | REG_USE1)
#define REG_USE02            (REG_USE0 | REG_USE2)
#define REG_USE12            (REG_USE1 | REG_USE2)
#define REG_USE23            (REG_USE2 | REG_USE3)
#define REG_USE123           (REG_USE1 | REG_USE2 | REG_USE3)

// TODO: #includes need a cleanup
#ifndef INVALID_SREG
#define INVALID_SREG (-1)
#endif

struct BasicBlock;
struct CallInfo;
struct CompilationUnit;
struct InlineMethod;
struct MIR;
struct LIR;
struct RegLocation;
struct RegisterInfo;
class DexFileMethodInliner;
class MIRGraph;
class Mir2Lir;

typedef int (*NextCallInsn)(CompilationUnit*, CallInfo*, int,
                            const MethodReference& target_method,
                            uint32_t method_idx, uintptr_t direct_code,
                            uintptr_t direct_method, InvokeType type);

typedef std::vector<uint8_t> CodeBuffer;

struct UseDefMasks {
  uint64_t use_mask;        // Resource mask for use.
  uint64_t def_mask;        // Resource mask for def.
};

struct AssemblyInfo {
  LIR* pcrel_next;          // Chain of LIR nodes needing pc relative fixups.
};

struct LIR {
  CodeOffset offset;              // Offset of this instruction.
  NarrowDexOffset dalvik_offset;  // Offset of Dalvik opcode in code units (16-bit words).
  int16_t opcode;
  LIR* next;
  LIR* prev;
  LIR* target;
  struct {
    unsigned int alias_info:17;   // For Dalvik register disambiguation.
    bool is_nop:1;                // LIR is optimized away.
    unsigned int size:4;          // Note: size of encoded instruction is in bytes.
    bool use_def_invalid:1;       // If true, masks should not be used.
    unsigned int generation:1;    // Used to track visitation state during fixup pass.
    unsigned int fixup:8;         // Fixup kind.
  } flags;
  union {
    UseDefMasks m;                // Use & Def masks used during optimization.
    AssemblyInfo a;               // Instruction info used during assembly phase.
  } u;
  int32_t operands[5];            // [0..4] = [dest, src1, src2, extra, extra2].
};

// Target-specific initialization.
Mir2Lir* ArmCodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                          ArenaAllocator* const arena);
Mir2Lir* Arm64CodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                            ArenaAllocator* const arena);
Mir2Lir* MipsCodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                           ArenaAllocator* const arena);
Mir2Lir* X86CodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                          ArenaAllocator* const arena);
Mir2Lir* X86_64CodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                             ArenaAllocator* const arena);

// Utility macros to traverse the LIR list.
#define NEXT_LIR(lir) (lir->next)
#define PREV_LIR(lir) (lir->prev)

// Defines for alias_info (tracks Dalvik register references).
#define DECODE_ALIAS_INFO_REG(X)        (X & 0xffff)
#define DECODE_ALIAS_INFO_WIDE_FLAG     (0x10000)
#define DECODE_ALIAS_INFO_WIDE(X)       ((X & DECODE_ALIAS_INFO_WIDE_FLAG) ? 1 : 0)
#define ENCODE_ALIAS_INFO(REG, ISWIDE)  (REG | (ISWIDE ? DECODE_ALIAS_INFO_WIDE_FLAG : 0))

// Common resource macros.
#define ENCODE_CCODE            (1ULL << kCCode)
#define ENCODE_FP_STATUS        (1ULL << kFPStatus)

// Abstract memory locations.
#define ENCODE_DALVIK_REG       (1ULL << kDalvikReg)
#define ENCODE_LITERAL          (1ULL << kLiteral)
#define ENCODE_HEAP_REF         (1ULL << kHeapRef)
#define ENCODE_MUST_NOT_ALIAS   (1ULL << kMustNotAlias)

#define ENCODE_ALL              (~0ULL)
#define ENCODE_MEM              (ENCODE_DALVIK_REG | ENCODE_LITERAL | \
                                 ENCODE_HEAP_REF | ENCODE_MUST_NOT_ALIAS)

#define ENCODE_REG_PAIR(low_reg, high_reg) ((low_reg & 0xff) | ((high_reg & 0xff) << 8))
#define DECODE_REG_PAIR(both_regs, low_reg, high_reg) \
  do { \
    low_reg = both_regs & 0xff; \
    high_reg = (both_regs >> 8) & 0xff; \
  } while (false)

// Mask to denote sreg as the start of a double.  Must not interfere with low 16 bits.
#define STARTING_DOUBLE_SREG 0x10000

// TODO: replace these macros
#define SLOW_FIELD_PATH (cu_->enable_debug & (1 << kDebugSlowFieldPath))
#define SLOW_INVOKE_PATH (cu_->enable_debug & (1 << kDebugSlowInvokePath))
#define SLOW_STRING_PATH (cu_->enable_debug & (1 << kDebugSlowStringPath))
#define SLOW_TYPE_PATH (cu_->enable_debug & (1 << kDebugSlowTypePath))
#define EXERCISE_SLOWEST_STRING_PATH (cu_->enable_debug & (1 << kDebugSlowestStringPath))

class Mir2Lir : public Backend {
  public:
    /*
     * Auxiliary information describing the location of data embedded in the Dalvik
     * byte code stream.
     */
    struct EmbeddedData {
      CodeOffset offset;        // Code offset of data block.
      const uint16_t* table;    // Original dex data.
      DexOffset vaddr;          // Dalvik offset of parent opcode.
    };

    struct FillArrayData : EmbeddedData {
      int32_t size;
    };

    struct SwitchTable : EmbeddedData {
      LIR* anchor;              // Reference instruction for relative offsets.
      LIR** targets;            // Array of case targets.
    };

    /* Static register use counts */
    struct RefCounts {
      int count;
      int s_reg;
    };

    /*
     * Data structure tracking the mapping between a Dalvik value (32 or 64 bits)
     * and native register storage.  The primary purpose is to reuse previously
     * loaded values, if possible, and otherwise to keep the value in register
     * storage as long as possible.
     *
     * NOTE 1: wide_value refers to the width of the Dalvik value contained in
     * this register (or pair).  For example, a 64-bit register containing a 32-bit
     * Dalvik value would have wide_value==false even though the storage container itself
     * is wide.  Similarly, a 32-bit register containing half of a 64-bit Dalvik value
     * would have wide_value==true (and additionally would have its partner field set to the
     * other half, whose wide_value field would also be true).
     *
     * NOTE 2: In the case of a register pair, you can determine which of the partners
     * is the low half by looking at the s_reg names.  The high s_reg will equal low_sreg + 1.
     *
     * NOTE 3: In the case of a 64-bit register holding a Dalvik wide value, wide_value
     * will be true and partner==self.  s_reg refers to the low-order word of the Dalvik
     * value, and the s_reg of the high word is implied (s_reg + 1).
     *
     * NOTE 4: The reg and is_temp fields should always be correct.  If is_temp is false no
     * other fields have meaning. [perhaps not true, wide should work for promoted regs?]
     * If is_temp==true and live==false, no other fields have
     * meaning.  If is_temp==true and live==true, wide_value, partner, dirty, s_reg, def_start
     * and def_end describe the relationship between the temp register/register pair and
     * the Dalvik value[s] described by s_reg/s_reg+1.
     *
     * The fields used_storage, master_storage and storage_mask are used to track allocation
     * in light of potential aliasing.  For example, consider Arm's d2, which overlaps s4 & s5.
     * d2's storage mask would be 0x00000003, the two low-order bits denoting 64 bits of
     * storage use.  For s4, it would be 0x00000001; for s5, 0x00000002.  These values should
     * not change once initialized.  The "used_storage" field tracks current allocation status.
     * Although each record contains this field, only the field from the largest member of
     * an aliased group is used.  In our case, it would be d2's.  The master_storage pointer
     * of d2, s4 and s5 would all point to d2's used_storage field.  Each bit in a used_storage
     * represents 32 bits of storage.  d2's used_storage would be initialized to 0xfffffffc.
     * Then, if we wanted to determine whether s4 could be allocated, we would "and"
     * s4's storage_mask with s4's *master_storage.  If the result is zero, s4 is free;
     * to allocate it: *master_storage |= storage_mask.  To free: *master_storage &= ~storage_mask.
     *
     * For an X86 vector register example, storage_mask would be:
     *    0x00000001 for 32-bit view of xmm1
     *    0x00000003 for 64-bit view of xmm1
     *    0x0000000f for 128-bit view of xmm1
     *    0x000000ff for 256-bit view of ymm1   // future expansion, if needed
     *    0x0000ffff for 512-bit view of ymm1   // future expansion, if needed
     *    0xffffffff for 1024-bit view of ymm1  // future expansion, if needed
     *
     * The "liveness" of a register is handled in a similar way.  The liveness_ storage is
     * held in the widest member of an aliased set.  Note, though, that for a temp register to
     * be reused as live, it must both be marked live and the associated SReg() must match the
     * desired s_reg.  This gets a little complicated when dealing with aliased registers.  All
     * members of an aliased set will share the same liveness flags, but each will individually
     * maintain s_reg_.  In this way we can know that at least one member of an
     * aliased set is live, but will only fully match on the appropriate alias view.  For example,
     * if Arm d1 is live as a double and has s_reg_ set to Dalvik v8 (which also implies v9
     * because it is wide), its aliases s2 and s3 will show as live, but will have
     * s_reg_ == INVALID_SREG.  An attempt to later AllocLiveReg() of v9 with a single-precision
     * view will fail because although s3's liveness bit is set, its s_reg_ will not match v9.
     * This will cause all members of the aliased set to be clobbered and AllocLiveReg() will
     * report that v9 is currently not live as a single (which is what we want).
     *
     * NOTE: the x86 usage is still somewhat in flux.  There are competing notions of how
     * to treat xmm registers:
     *     1. Treat them all as 128-bits wide, but denote how much data is used via the bytes field.
     *         o This more closely matches reality, but means you'd need to be able to get
     *           to the associated RegisterInfo struct to figure out how it's being used.
     *         o This is how 64-bit core registers will be used - always 64 bits, but the
     *           "bytes" field will be 4 for 32-bit usage and 8 for 64-bit usage.
     *     2. View the xmm registers based on contents.
     *         o A single in an xmm2 register would be k32BitVector, while a double in xmm2 would
     *           be a k64BitVector.
     *         o Note that the two uses above would be considered distinct registers (but with
     *           the aliasing mechanism, we could detect interference).
     *         o This is how aliased double and single float registers will be handled on
     *           Arm and MIPS.
     * Working plan is, for all targets, to follow mechanism 1 for 64-bit core registers, and
     * mechanism 2 for aliased float registers and x86 vector registers.
     */
    class RegisterInfo {
     public:
      RegisterInfo(RegStorage r, uint64_t mask = ENCODE_ALL);
      ~RegisterInfo() {}
      static void* operator new(size_t size, ArenaAllocator* arena) {
        return arena->Alloc(size, kArenaAllocRegAlloc);
      }

      static const uint32_t k32SoloStorageMask     = 0x00000001;
      static const uint32_t kLowSingleStorageMask  = 0x00000001;
      static const uint32_t kHighSingleStorageMask = 0x00000002;
      static const uint32_t k64SoloStorageMask     = 0x00000003;
      static const uint32_t k128SoloStorageMask    = 0x0000000f;
      static const uint32_t k256SoloStorageMask    = 0x000000ff;
      static const uint32_t k512SoloStorageMask    = 0x0000ffff;
      static const uint32_t k1024SoloStorageMask   = 0xffffffff;

      bool InUse() { return (storage_mask_ & master_->used_storage_) != 0; }
      void MarkInUse() { master_->used_storage_ |= storage_mask_; }
      void MarkFree() { master_->used_storage_ &= ~storage_mask_; }
      // No part of the containing storage is live in this view.
      bool IsDead() { return (master_->liveness_ & storage_mask_) == 0; }
      // Liveness of this view matches.  Note: not equivalent to !IsDead().
      bool IsLive() { return (master_->liveness_ & storage_mask_) == storage_mask_; }
      void MarkLive(int s_reg) {
        // TODO: Anything useful to assert here?
        s_reg_ = s_reg;
        master_->liveness_ |= storage_mask_;
      }
      void MarkDead() {
        if (SReg() != INVALID_SREG) {
          s_reg_ = INVALID_SREG;
          master_->liveness_ &= ~storage_mask_;
          ResetDefBody();
        }
      }
      RegStorage GetReg() { return reg_; }
      void SetReg(RegStorage reg) { reg_ = reg; }
      bool IsTemp() { return is_temp_; }
      void SetIsTemp(bool val) { is_temp_ = val; }
      bool IsWide() { return wide_value_; }
      void SetIsWide(bool val) {
        wide_value_ = val;
        if (!val) {
          // If not wide, reset partner to self.
          SetPartner(GetReg());
        }
      }
      bool IsDirty() { return dirty_; }
      void SetIsDirty(bool val) { dirty_ = val; }
      RegStorage Partner() { return partner_; }
      void SetPartner(RegStorage partner) { partner_ = partner; }
      int SReg() { return (!IsTemp() || IsLive()) ? s_reg_ : INVALID_SREG; }
      uint64_t DefUseMask() { return def_use_mask_; }
      void SetDefUseMask(uint64_t def_use_mask) { def_use_mask_ = def_use_mask; }
      RegisterInfo* Master() { return master_; }
      void SetMaster(RegisterInfo* master) {
        master_ = master;
        if (master != this) {
          master_->aliased_ = true;
          DCHECK(alias_chain_ == nullptr);
          alias_chain_ = master_->alias_chain_;
          master_->alias_chain_ = this;
        }
      }
      bool IsAliased() { return aliased_; }
      RegisterInfo* GetAliasChain() { return alias_chain_; }
      uint32_t StorageMask() { return storage_mask_; }
      void SetStorageMask(uint32_t storage_mask) { storage_mask_ = storage_mask; }
      LIR* DefStart() { return def_start_; }
      void SetDefStart(LIR* def_start) { def_start_ = def_start; }
      LIR* DefEnd() { return def_end_; }
      void SetDefEnd(LIR* def_end) { def_end_ = def_end; }
      void ResetDefBody() { def_start_ = def_end_ = nullptr; }
      // Find member of aliased set matching storage_used; return nullptr if none.
      RegisterInfo* FindMatchingView(uint32_t storage_used) {
        RegisterInfo* res = Master();
        for (; res != nullptr; res = res->GetAliasChain()) {
          if (res->StorageMask() == storage_used) {
            break;
          }
        }
        return res;
      }

     private:
      RegStorage reg_;
      bool is_temp_;               // Can allocate as temp?
      bool wide_value_;            // Holds a Dalvik wide value (either itself, or part of a pair).
      bool dirty_;                 // If live, is it dirty?
      bool aliased_;               // Is this the master for other aliased RegisterInfo's?
      RegStorage partner_;         // If wide_value, other reg of pair or self if 64-bit register.
      int s_reg_;                  // Name of live value.
      uint64_t def_use_mask_;      // Resources for this element.
      uint32_t used_storage_;      // 1 bit per 4 bytes of storage.  Unused by aliases.
      uint32_t liveness_;          // 1 bit per 4 bytes of storage.  Unused by aliases.
      RegisterInfo* master_;       // Pointer to controlling storage mask.
      uint32_t storage_mask_;      // Track allocation of sub-units.
      LIR *def_start_;             // Starting inst in last def sequence.
      LIR *def_end_;               // Ending inst in last def sequence.
      RegisterInfo* alias_chain_;  // Chain of aliased registers.
    };
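
    // The storage-mask bookkeeping described above reduces to the accessors defined here.
    // A rough sketch, assuming 'd2', 's4' and 's5' name the RegisterInfo records of an
    // aliased Arm set with d2 as master (names taken from the example in the comment above):
    //
    //   if (!s4->InUse()) {     // (s4's storage_mask_ & d2's used_storage_) == 0?
    //     s4->MarkInUse();      // d2's used_storage_ |= 0x00000001.
    //   }
    //   RegisterInfo* d_view = s4->FindMatchingView(RegisterInfo::k64SoloStorageMask);
    //   ...                     // d_view is the 64-bit (d2) view, or nullptr if absent.
    //   s4->MarkFree();         // d2's used_storage_ &= ~0x00000001.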

    class RegisterPool {
     public:
      RegisterPool(Mir2Lir* m2l, ArenaAllocator* arena,
                   const ArrayRef<const RegStorage>& core_regs,
                   const ArrayRef<const RegStorage>& core64_regs,
                   const ArrayRef<const RegStorage>& sp_regs,
                   const ArrayRef<const RegStorage>& dp_regs,
                   const ArrayRef<const RegStorage>& reserved_regs,
                   const ArrayRef<const RegStorage>& reserved64_regs,
                   const ArrayRef<const RegStorage>& core_temps,
                   const ArrayRef<const RegStorage>& core64_temps,
                   const ArrayRef<const RegStorage>& sp_temps,
                   const ArrayRef<const RegStorage>& dp_temps);
      ~RegisterPool() {}
      static void* operator new(size_t size, ArenaAllocator* arena) {
        return arena->Alloc(size, kArenaAllocRegAlloc);
      }
      void ResetNextTemp() {
        next_core_reg_ = 0;
        next_sp_reg_ = 0;
        next_dp_reg_ = 0;
      }
      GrowableArray<RegisterInfo*> core_regs_;
      int next_core_reg_;
      GrowableArray<RegisterInfo*> core64_regs_;
      int next_core64_reg_;
      GrowableArray<RegisterInfo*> sp_regs_;    // Single precision float.
      int next_sp_reg_;
      GrowableArray<RegisterInfo*> dp_regs_;    // Double precision float.
      int next_dp_reg_;
      GrowableArray<RegisterInfo*>* ref_regs_;  // Points to core_regs_ or core64_regs_.
      int* next_ref_reg_;

     private:
      Mir2Lir* const m2l_;
    };

    struct PromotionMap {
      RegLocationType core_location:3;
      uint8_t core_reg;
      RegLocationType fp_location:3;
      uint8_t FpReg;
      bool first_in_pair;
    };

    //
    // Slow paths.  This object is used to generate a sequence of code that is executed in the
    // slow path.  For example, resolving a string or class is slow, and it will only be executed
    // once (after that it is resolved and doesn't need to be done again).  We want slow paths
    // to be placed out-of-line, and not require a (probably mispredicted) conditional forward
    // branch over them.
    //
    // If you want to create a slow path, declare a class derived from LIRSlowPath and provide
    // the Compile() function that will be called near the end of the code generated by the
    // method.
    //
    // The basic flow for a slow path is:
    //
    //           CMP reg, #value
    //           BEQ fromfast
    //   cont:
    //           ...
    //           fast path code
    //           ...
    //           more code
    //           ...
    //           RETURN
    //
    //   fromfast:
    //           ...
    //           slow path code
    //           ...
    //           B cont
    //
    // So you see we need two labels and two branches.  The first branch (called fromfast) is
    // the conditional branch to the slow path code.  The second label (called cont) is used
    // as an unconditional branch target for getting back to the code after the slow path
    // has completed.  A sketch of a derived slow path follows the class declaration below.
    //

    class LIRSlowPath {
     public:
      LIRSlowPath(Mir2Lir* m2l, const DexOffset dexpc, LIR* fromfast,
                  LIR* cont = nullptr) :
          m2l_(m2l), cu_(m2l->cu_), current_dex_pc_(dexpc), fromfast_(fromfast), cont_(cont) {
        m2l->StartSlowPath(cont);
      }
      virtual ~LIRSlowPath() {}
      virtual void Compile() = 0;

      static void* operator new(size_t size, ArenaAllocator* arena) {
        return arena->Alloc(size, kArenaAllocData);
      }

      LIR *GetContinuationLabel() {
        return cont_;
      }

      LIR *GetFromFast() {
        return fromfast_;
      }

     protected:
      LIR* GenerateTargetLabel(int opcode = kPseudoTargetLabel);

      Mir2Lir* const m2l_;
      CompilationUnit* const cu_;
      const DexOffset current_dex_pc_;
      LIR* const fromfast_;
      LIR* const cont_;
    };
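
    // A minimal sketch of a derived slow path.  The class name is hypothetical, and the
    // unconditional-branch helper used to return to cont_ is assumed from the
    // target-independent codegen API; Compile() binds the out-of-line target, emits the
    // slow-path code, and branches back to the continuation label:
    //
    //   class MySlowPath : public LIRSlowPath {
    //    public:
    //     MySlowPath(Mir2Lir* m2l, DexOffset dexpc, LIR* fromfast, LIR* cont)
    //         : LIRSlowPath(m2l, dexpc, fromfast, cont) {}
    //     void Compile() {
    //       GenerateTargetLabel();               // Bind the out-of-line "fromfast" target.
    //       // ... emit the slow-path code, e.g. a runtime helper call ...
    //       m2l_->OpUnconditionalBranch(cont_);  // "B cont" back to the fast path.
    //     }
    //   };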

    virtual ~Mir2Lir() {}

    int32_t s4FromSwitchData(const void* switch_data) {
      return *reinterpret_cast<const int32_t*>(switch_data);
    }

    /*
     * TODO: this is a trace JIT vestige, and its use should be reconsidered.  At the time
     * it was introduced, it was intended to be a quick best guess of type without having to
     * take the time to do type analysis.  Currently, though, we have a much better idea of
     * the types of Dalvik virtual registers.  Instead of using this for a best guess, why not
     * just use our knowledge of type to select the most appropriate register class?
     */
    RegisterClass RegClassBySize(OpSize size) {
      if (size == kReference) {
        return kRefReg;
      } else {
        return (size == kUnsignedHalf || size == kSignedHalf || size == kUnsignedByte ||
                size == kSignedByte) ? kCoreReg : kAnyReg;
      }
    }

    size_t CodeBufferSizeInBytes() {
      return code_buffer_.size() / sizeof(code_buffer_[0]);
    }

    static bool IsPseudoLirOp(int opcode) {
      return (opcode < 0);
    }

    /*
     * LIR operands are 32-bit integers.  Sometimes, especially for managing
     * instructions which require PC-relative fixups, we need the operands to carry
     * pointers.  To do this, we assign these pointers an index in pointer_storage_, and
     * hold that index in the operand array.
     * TUNING: If use of these utilities becomes more common on 32-bit builds, it
     * may be worth conditionally-compiling a set of identity functions here.
     */
    uint32_t WrapPointer(void* pointer) {
      uint32_t res = pointer_storage_.Size();
      pointer_storage_.Insert(pointer);
      return res;
    }

    void* UnwrapPointer(size_t index) {
      return pointer_storage_.Get(index);
    }
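
    // Rough usage sketch (assuming 'tab_rec' is a SwitchTable* built elsewhere): the wrapped
    // index fits in a 32-bit LIR operand and the pointer is recovered when it is needed.
    //
    //   uint32_t idx = WrapPointer(tab_rec);  // e.g. stored in lir->operands[n].
    //   ...
    //   SwitchTable* tab = static_cast<SwitchTable*>(UnwrapPointer(idx));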

    // strdup(), but allocates from the arena.
    char* ArenaStrdup(const char* str) {
      size_t len = strlen(str) + 1;
      char* res = reinterpret_cast<char*>(arena_->Alloc(len, kArenaAllocMisc));
      if (res != NULL) {
        strncpy(res, str, len);
      }
      return res;
    }

    // Shared by all targets - implemented in codegen_util.cc
    void AppendLIR(LIR* lir);
    void InsertLIRBefore(LIR* current_lir, LIR* new_lir);
    void InsertLIRAfter(LIR* current_lir, LIR* new_lir);

    /**
     * @brief Provides the maximum number of compiler temporaries that the backend can/wants
     * to place in a frame.
     * @return Returns the maximum number of compiler temporaries.
     */
    size_t GetMaxPossibleCompilerTemps() const;

    /**
     * @brief Provides the number of bytes needed in frame for spilling of compiler temporaries.
     * @return Returns the size in bytes for space needed for compiler temporary spill region.
     */
    size_t GetNumBytesForCompilerTempSpillRegion();

    DexOffset GetCurrentDexPc() const {
      return current_dalvik_offset_;
    }

    RegisterClass ShortyToRegClass(char shorty_type);
    RegisterClass LocToRegClass(RegLocation loc);
    int ComputeFrameSize();
    virtual void Materialize();
    virtual CompiledMethod* GetCompiledMethod();
    void MarkSafepointPC(LIR* inst);
    void SetupResourceMasks(LIR* lir, bool leave_mem_ref = false);
    void SetMemRefType(LIR* lir, bool is_load, int mem_type);
    void AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load, bool is64bit);
    void SetupRegMask(uint64_t* mask, int reg);
    void DumpLIRInsn(LIR* arg, unsigned char* base_addr);
    void DumpPromotionMap();
    void CodegenDump();
    LIR* RawLIR(DexOffset dalvik_offset, int opcode, int op0 = 0, int op1 = 0,
                int op2 = 0, int op3 = 0, int op4 = 0, LIR* target = NULL);
    LIR* NewLIR0(int opcode);
    LIR* NewLIR1(int opcode, int dest);
    LIR* NewLIR2(int opcode, int dest, int src1);
    LIR* NewLIR2NoDest(int opcode, int src, int info);
    LIR* NewLIR3(int opcode, int dest, int src1, int src2);
    LIR* NewLIR4(int opcode, int dest, int src1, int src2, int info);
    LIR* NewLIR5(int opcode, int dest, int src1, int src2, int info1, int info2);
    LIR* ScanLiteralPool(LIR* data_target, int value, unsigned int delta);
    LIR* ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi);
    LIR* ScanLiteralPoolMethod(LIR* data_target, const MethodReference& method);
    LIR* AddWordData(LIR* *constant_list_p, int value);
    LIR* AddWideData(LIR* *constant_list_p, int val_lo, int val_hi);
    void ProcessSwitchTables();
    void DumpSparseSwitchTable(const uint16_t* table);
    void DumpPackedSwitchTable(const uint16_t* table);
    void MarkBoundary(DexOffset offset, const char* inst_str);
    void NopLIR(LIR* lir);
    void UnlinkLIR(LIR* lir);
    bool EvaluateBranch(Instruction::Code opcode, int src1, int src2);
    bool IsInexpensiveConstant(RegLocation rl_src);
    ConditionCode FlipComparisonOrder(ConditionCode before);
    ConditionCode NegateComparison(ConditionCode before);
    virtual void InstallLiteralPools();
    void InstallSwitchTables();
    void InstallFillArrayData();
    bool VerifyCatchEntries();
    void CreateMappingTables();
    void CreateNativeGcMap();
    int AssignLiteralOffset(CodeOffset offset);
    int AssignSwitchTablesOffset(CodeOffset offset);
    int AssignFillArrayDataOffset(CodeOffset offset);
    LIR* InsertCaseLabel(DexOffset vaddr, int keyVal);
    void MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec);
    void MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec);

    virtual void StartSlowPath(LIR *label) {}
    virtual void BeginInvoke(CallInfo* info) {}
    virtual void EndInvoke(CallInfo* info) {}

    // Handle bookkeeping to convert a wide RegLocation to a narrow RegLocation.  No code generated.
    RegLocation NarrowRegLoc(RegLocation loc);

    // Shared by all targets - implemented in local_optimizations.cc
    void ConvertMemOpIntoMove(LIR* orig_lir, RegStorage dest, RegStorage src);
    void ApplyLoadStoreElimination(LIR* head_lir, LIR* tail_lir);
    void ApplyLoadHoisting(LIR* head_lir, LIR* tail_lir);
    virtual void ApplyLocalOptimizations(LIR* head_lir, LIR* tail_lir);

    // Shared by all targets - implemented in ralloc_util.cc
    int GetSRegHi(int lowSreg);
    bool LiveOut(int s_reg);
    void SimpleRegAlloc();
    void ResetRegPool();
    void CompilerInitPool(RegisterInfo* info, RegStorage* regs, int num);
    void DumpRegPool(GrowableArray<RegisterInfo*>* regs);
    void DumpCoreRegPool();
    void DumpFpRegPool();
    void DumpRegPools();
    /* Mark a temp register as dead.  Does not affect allocation state. */
    void Clobber(RegStorage reg);
    void ClobberSReg(int s_reg);
    void ClobberAliases(RegisterInfo* info, uint32_t clobber_mask);
    int SRegToPMap(int s_reg);
    void RecordCorePromotion(RegStorage reg, int s_reg);
    RegStorage AllocPreservedCoreReg(int s_reg);
    void RecordSinglePromotion(RegStorage reg, int s_reg);
    void RecordDoublePromotion(RegStorage reg, int s_reg);
    RegStorage AllocPreservedSingle(int s_reg);
    virtual RegStorage AllocPreservedDouble(int s_reg);
    RegStorage AllocTempBody(GrowableArray<RegisterInfo*> &regs, int* next_temp, bool required);
    virtual RegStorage AllocFreeTemp();
    virtual RegStorage AllocTemp();
    virtual RegStorage AllocTempWide();
    virtual RegStorage AllocTempRef();
    virtual RegStorage AllocTempSingle();
    virtual RegStorage AllocTempDouble();
    virtual RegStorage AllocTypedTemp(bool fp_hint, int reg_class);
    virtual RegStorage AllocTypedTempWide(bool fp_hint, int reg_class);
    void FlushReg(RegStorage reg);
    void FlushRegWide(RegStorage reg);
    RegStorage AllocLiveReg(int s_reg, int reg_class, bool wide);
    RegStorage FindLiveReg(GrowableArray<RegisterInfo*> &regs, int s_reg);
    virtual void FreeTemp(RegStorage reg);
    virtual void FreeRegLocTemps(RegLocation rl_keep, RegLocation rl_free);
    virtual bool IsLive(RegStorage reg);
    virtual bool IsTemp(RegStorage reg);
    bool IsPromoted(RegStorage reg);
    bool IsDirty(RegStorage reg);
    void LockTemp(RegStorage reg);
    void ResetDef(RegStorage reg);
    void NullifyRange(RegStorage reg, int s_reg);
    void MarkDef(RegLocation rl, LIR *start, LIR *finish);
    void MarkDefWide(RegLocation rl, LIR *start, LIR *finish);
    void ResetDefLoc(RegLocation rl);
    void ResetDefLocWide(RegLocation rl);
    void ResetDefTracking();
    void ClobberAllTemps();
    void FlushSpecificReg(RegisterInfo* info);
    void FlushAllRegs();
    bool RegClassMatches(int reg_class, RegStorage reg);
    void MarkLive(RegLocation loc);
    void MarkTemp(RegStorage reg);
    void UnmarkTemp(RegStorage reg);
    void MarkWide(RegStorage reg);
    void MarkNarrow(RegStorage reg);
    void MarkClean(RegLocation loc);
    void MarkDirty(RegLocation loc);
    void MarkInUse(RegStorage reg);
    bool CheckCorePoolSanity();
    virtual RegLocation UpdateLoc(RegLocation loc);
    virtual RegLocation UpdateLocWide(RegLocation loc);
    RegLocation UpdateRawLoc(RegLocation loc);

    /**
     * @brief Used to prepare a register location to receive a wide value.
     * @see EvalLoc
     * @param loc the location where the value will be stored.
     * @param reg_class Type of register needed.
     * @param update Whether the liveness information should be updated.
     * @return Returns the properly typed temporary in physical register pairs.
     */
    virtual RegLocation EvalLocWide(RegLocation loc, int reg_class, bool update);

    /**
     * @brief Used to prepare a register location to receive a value.
     * @param loc the location where the value will be stored.
     * @param reg_class Type of register needed.
     * @param update Whether the liveness information should be updated.
     * @return Returns the properly typed temporary in physical register.
     */
    virtual RegLocation EvalLoc(RegLocation loc, int reg_class, bool update);

    void CountRefs(RefCounts* core_counts, RefCounts* fp_counts, size_t num_regs);
    void DumpCounts(const RefCounts* arr, int size, const char* msg);
    void DoPromotion();
    int VRegOffset(int v_reg);
    int SRegOffset(int s_reg);
    RegLocation GetReturnWide(RegisterClass reg_class);
    RegLocation GetReturn(RegisterClass reg_class);
    RegisterInfo* GetRegInfo(RegStorage reg);

    // Shared by all targets - implemented in gen_common.cc.
    void AddIntrinsicSlowPath(CallInfo* info, LIR* branch, LIR* resume = nullptr);
    bool HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div,
                          RegLocation rl_src, RegLocation rl_dest, int lit);
    bool HandleEasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit);
    virtual void HandleSlowPaths();
    void GenBarrier();
    void GenDivZeroException();
    // c_code holds condition code that's generated from testing divisor against 0.
    void GenDivZeroCheck(ConditionCode c_code);
    // reg holds divisor.
    void GenDivZeroCheck(RegStorage reg);
    void GenArrayBoundsCheck(RegStorage index, RegStorage length);
    void GenArrayBoundsCheck(int32_t index, RegStorage length);
    LIR* GenNullCheck(RegStorage reg);
    void MarkPossibleNullPointerException(int opt_flags);
    void MarkPossibleStackOverflowException();
    void ForceImplicitNullCheck(RegStorage reg, int opt_flags);
    LIR* GenImmedCheck(ConditionCode c_code, RegStorage reg, int imm_val, ThrowKind kind);
    LIR* GenNullCheck(RegStorage m_reg, int opt_flags);
    LIR* GenExplicitNullCheck(RegStorage m_reg, int opt_flags);
    void GenCompareAndBranch(Instruction::Code opcode, RegLocation rl_src1,
                             RegLocation rl_src2, LIR* taken, LIR* fall_through);
    void GenCompareZeroAndBranch(Instruction::Code opcode, RegLocation rl_src,
                                 LIR* taken, LIR* fall_through);
    virtual void GenIntToLong(RegLocation rl_dest, RegLocation rl_src);
    void GenIntNarrowing(Instruction::Code opcode, RegLocation rl_dest,
                         RegLocation rl_src);
    void GenNewArray(uint32_t type_idx, RegLocation rl_dest,
                     RegLocation rl_src);
    void GenFilledNewArray(CallInfo* info);
    void GenSput(MIR* mir, RegLocation rl_src,
                 bool is_long_or_double, bool is_object);
    void GenSget(MIR* mir, RegLocation rl_dest,
                 bool is_long_or_double, bool is_object);
    void GenIGet(MIR* mir, int opt_flags, OpSize size,
                 RegLocation rl_dest, RegLocation rl_obj, bool is_long_or_double, bool is_object);
    void GenIPut(MIR* mir, int opt_flags, OpSize size,
                 RegLocation rl_src, RegLocation rl_obj, bool is_long_or_double, bool is_object);
    void GenArrayObjPut(int opt_flags, RegLocation rl_array, RegLocation rl_index,
                        RegLocation rl_src);

    void GenConstClass(uint32_t type_idx, RegLocation rl_dest);
    void GenConstString(uint32_t string_idx, RegLocation rl_dest);
    void GenNewInstance(uint32_t type_idx, RegLocation rl_dest);
    void GenThrow(RegLocation rl_src);
    void GenInstanceof(uint32_t type_idx, RegLocation rl_dest, RegLocation rl_src);
    void GenCheckCast(uint32_t insn_idx, uint32_t type_idx, RegLocation rl_src);
    void GenLong3Addr(OpKind first_op, OpKind second_op, RegLocation rl_dest,
                      RegLocation rl_src1, RegLocation rl_src2);
    virtual void GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest,
                                RegLocation rl_src1, RegLocation rl_shift);
    void GenArithOpIntLit(Instruction::Code opcode, RegLocation rl_dest,
                          RegLocation rl_src, int lit);
    void GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest,
                        RegLocation rl_src1, RegLocation rl_src2);
    template <size_t pointer_size>
    void GenConversionCall(ThreadOffset<pointer_size> func_offset, RegLocation rl_dest,
                           RegLocation rl_src);
    virtual void GenSuspendTest(int opt_flags);
    virtual void GenSuspendTestAndBranch(int opt_flags, LIR* target);

    // This will be overridden by x86 implementation.
    virtual void GenConstWide(RegLocation rl_dest, int64_t value);
    virtual void GenArithOpInt(Instruction::Code opcode, RegLocation rl_dest,
                               RegLocation rl_src1, RegLocation rl_src2);

    // Shared by all targets - implemented in gen_invoke.cc.
    template <size_t pointer_size>
    LIR* CallHelper(RegStorage r_tgt, ThreadOffset<pointer_size> helper_offset, bool safepoint_pc,
                    bool use_link = true);
    RegStorage CallHelperSetup(ThreadOffset<4> helper_offset);
    RegStorage CallHelperSetup(ThreadOffset<8> helper_offset);
    template <size_t pointer_size>
    void CallRuntimeHelper(ThreadOffset<pointer_size> helper_offset, bool safepoint_pc);
    template <size_t pointer_size>
    void CallRuntimeHelperImm(ThreadOffset<pointer_size> helper_offset, int arg0, bool safepoint_pc);
    template <size_t pointer_size>
    void CallRuntimeHelperReg(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, bool safepoint_pc);
    template <size_t pointer_size>
    void CallRuntimeHelperRegLocation(ThreadOffset<pointer_size> helper_offset, RegLocation arg0,
                                      bool safepoint_pc);
    template <size_t pointer_size>
    void CallRuntimeHelperImmImm(ThreadOffset<pointer_size> helper_offset, int arg0, int arg1,
                                 bool safepoint_pc);
    template <size_t pointer_size>
| 865 | void CallRuntimeHelperImmRegLocation(ThreadOffset<pointer_size> helper_offset, int arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 866 | RegLocation arg1, bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 867 | template <size_t pointer_size> |
| 868 | void CallRuntimeHelperRegLocationImm(ThreadOffset<pointer_size> helper_offset, RegLocation arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 869 | int arg1, bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 870 | template <size_t pointer_size> |
| 871 | void CallRuntimeHelperImmReg(ThreadOffset<pointer_size> helper_offset, int arg0, RegStorage arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 872 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 873 | template <size_t pointer_size> |
| 874 | void CallRuntimeHelperRegImm(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, int arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 875 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 876 | template <size_t pointer_size> |
| 877 | void CallRuntimeHelperImmMethod(ThreadOffset<pointer_size> helper_offset, int arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 878 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 879 | template <size_t pointer_size> |
| 880 | void CallRuntimeHelperRegMethod(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 881 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 882 | template <size_t pointer_size> |
| 883 | void CallRuntimeHelperRegMethodRegLocation(ThreadOffset<pointer_size> helper_offset, |
| 884 | RegStorage arg0, RegLocation arg2, bool safepoint_pc); |
| 885 | template <size_t pointer_size> |
| 886 | void CallRuntimeHelperRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 887 | RegLocation arg0, RegLocation arg1, |
| 888 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 889 | template <size_t pointer_size> |
| 890 | void CallRuntimeHelperRegReg(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, |
| 891 | RegStorage arg1, bool safepoint_pc); |
| 892 | template <size_t pointer_size> |
| 893 | void CallRuntimeHelperRegRegImm(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, |
| 894 | RegStorage arg1, int arg2, bool safepoint_pc); |
| 895 | template <size_t pointer_size> |
| 896 | void CallRuntimeHelperImmMethodRegLocation(ThreadOffset<pointer_size> helper_offset, int arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 897 | RegLocation arg2, bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 898 | template <size_t pointer_size> |
| 899 | void CallRuntimeHelperImmMethodImm(ThreadOffset<pointer_size> helper_offset, int arg0, int arg2, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 900 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 901 | template <size_t pointer_size> |
| 902 | void CallRuntimeHelperImmRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 903 | int arg0, RegLocation arg1, RegLocation arg2, |
| 904 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 905 | template <size_t pointer_size> |
| 906 | void CallRuntimeHelperRegLocationRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset, |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 907 | RegLocation arg0, RegLocation arg1, |
| 908 | RegLocation arg2, |
| 909 | bool safepoint_pc); |
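    /*
     * Illustrative usage sketch (not part of the original header): callers of the
     * CallRuntimeHelper* family typically pick the entrypoint via the
     * QUICK_ENTRYPOINT_OFFSET macro of this era and let the helper do argument setup
     * and the safepoint. The exact body below is hypothetical.
     *
     *   void Mir2Lir::GenThrow(RegLocation rl_src) {
     *     FlushAllRegs();
     *     CallRuntimeHelperRegLocation(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException),
     *                                  rl_src, true);  // true => record a safepoint PC.
     *   }
     */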
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 910 | void GenInvoke(CallInfo* info); |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 911 | void GenInvokeNoInline(CallInfo* info); |
Matteo Franchin | e45fb9e | 2014-05-06 10:10:30 +0100 | [diff] [blame] | 912 | virtual void FlushIns(RegLocation* ArgLocs, RegLocation rl_method); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 913 | int GenDalvikArgsNoRange(CallInfo* info, int call_state, LIR** pcrLabel, |
| 914 | NextCallInsn next_call_insn, |
| 915 | const MethodReference& target_method, |
| 916 | uint32_t vtable_idx, |
| 917 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 918 | bool skip_this); |
| 919 | int GenDalvikArgsRange(CallInfo* info, int call_state, LIR** pcrLabel, |
| 920 | NextCallInsn next_call_insn, |
| 921 | const MethodReference& target_method, |
| 922 | uint32_t vtable_idx, |
| 923 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 924 | bool skip_this); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 925 | |
| 926 | /** |
| 927 | * @brief Used to determine the register location of destination. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 928 |      * @details This is needed during generation of inline intrinsics because it finds the |
 | 929 |      * destination of the return, either the physical register or the target of |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 930 |      * move-result. |
| 931 | * @param info Information about the invoke. |
| 932 | * @return Returns the destination location. |
| 933 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 934 | RegLocation InlineTarget(CallInfo* info); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 935 | |
| 936 | /** |
| 937 | * @brief Used to determine the wide register location of destination. |
| 938 | * @see InlineTarget |
| 939 | * @param info Information about the invoke. |
| 940 | * @return Returns the destination location. |
| 941 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 942 | RegLocation InlineTargetWide(CallInfo* info); |
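    /*
     * Illustrative sketch (not part of the original header): an inlined intrinsic first
     * asks InlineTarget() where its result must land (the physical return register or
     * the target of a following move-result) and stores there. Helper names such as
     * EvalLoc and the abs computation are assumptions; the body is hypothetical.
     *
     *   bool Mir2Lir::GenInlinedAbsIntSketch(CallInfo* info) {
     *     RegLocation rl_src = info->args[0];
     *     RegLocation rl_dest = InlineTarget(info);   // Destination per move-result semantics.
     *     rl_src = LoadValue(rl_src, kCoreReg);
     *     RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
     *     // ... target-specific abs computation into rl_result.reg ...
     *     StoreValue(rl_dest, rl_result);
     *     return true;
     *   }
     */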
| 943 | |
| 944 | bool GenInlinedCharAt(CallInfo* info); |
| 945 | bool GenInlinedStringIsEmptyOrLength(CallInfo* info, bool is_empty); |
Vladimir Marko | 6bdf1ff | 2013-10-29 17:40:46 +0000 | [diff] [blame] | 946 | bool GenInlinedReverseBytes(CallInfo* info, OpSize size); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 947 | bool GenInlinedAbsInt(CallInfo* info); |
| 948 | bool GenInlinedAbsLong(CallInfo* info); |
Yixin Shou | dbb17e3 | 2014-02-07 05:09:30 -0800 | [diff] [blame] | 949 | bool GenInlinedAbsFloat(CallInfo* info); |
| 950 | bool GenInlinedAbsDouble(CallInfo* info); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 951 | bool GenInlinedFloatCvt(CallInfo* info); |
| 952 | bool GenInlinedDoubleCvt(CallInfo* info); |
Mark Mendell | 4028a6c | 2014-02-19 20:06:20 -0800 | [diff] [blame] | 953 | virtual bool GenInlinedIndexOf(CallInfo* info, bool zero_based); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 954 | bool GenInlinedStringCompareTo(CallInfo* info); |
| 955 | bool GenInlinedCurrentThread(CallInfo* info); |
| 956 | bool GenInlinedUnsafeGet(CallInfo* info, bool is_long, bool is_volatile); |
| 957 | bool GenInlinedUnsafePut(CallInfo* info, bool is_long, bool is_object, |
| 958 | bool is_volatile, bool is_ordered); |
Matteo Franchin | e45fb9e | 2014-05-06 10:10:30 +0100 | [diff] [blame] | 959 | virtual int LoadArgRegs(CallInfo* info, int call_state, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 960 | NextCallInsn next_call_insn, |
| 961 | const MethodReference& target_method, |
| 962 | uint32_t vtable_idx, |
| 963 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 964 | bool skip_this); |
| 965 | |
| 966 | // Shared by all targets - implemented in gen_loadstore.cc. |
| 967 | RegLocation LoadCurrMethod(); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 968 | void LoadCurrMethodDirect(RegStorage r_tgt); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 969 | virtual LIR* LoadConstant(RegStorage r_dest, int value); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 970 | // Natural word size. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 971 | virtual LIR* LoadWordDisp(RegStorage r_base, int displacement, RegStorage r_dest) { |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 972 | return LoadBaseDisp(r_base, displacement, r_dest, kWord); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 973 | } |
| 974 | // Load 32 bits, regardless of target. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 975 | virtual LIR* Load32Disp(RegStorage r_base, int displacement, RegStorage r_dest) { |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 976 | return LoadBaseDisp(r_base, displacement, r_dest, k32); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 977 | } |
| 978 | // Load a reference at base + displacement and decompress into register. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 979 | virtual LIR* LoadRefDisp(RegStorage r_base, int displacement, RegStorage r_dest) { |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 980 | return LoadBaseDisp(r_base, displacement, r_dest, kReference); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 981 | } |
| 982 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 983 | virtual RegLocation LoadValue(RegLocation rl_src, RegisterClass op_kind); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 984 | // Same as above, but derive the target register class from the location record. |
| 985 | virtual RegLocation LoadValue(RegLocation rl_src); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 986 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 987 | virtual RegLocation LoadValueWide(RegLocation rl_src, RegisterClass op_kind); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 988 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 989 | virtual void LoadValueDirect(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 990 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 991 | virtual void LoadValueDirectFixed(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 992 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 993 | virtual void LoadValueDirectWide(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 994 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 995 | virtual void LoadValueDirectWideFixed(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 996 | // Store an item of natural word size. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 997 | virtual LIR* StoreWordDisp(RegStorage r_base, int displacement, RegStorage r_src) { |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 998 | return StoreBaseDisp(r_base, displacement, r_src, kWord); |
| 999 | } |
| 1000 | // Store an uncompressed reference into a compressed 32-bit container. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1001 | virtual LIR* StoreRefDisp(RegStorage r_base, int displacement, RegStorage r_src) { |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1002 | return StoreBaseDisp(r_base, displacement, r_src, kReference); |
| 1003 | } |
| 1004 | // Store 32 bits, regardless of target. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1005 | virtual LIR* Store32Disp(RegStorage r_base, int displacement, RegStorage r_src) { |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1006 | return StoreBaseDisp(r_base, displacement, r_src, k32); |
| 1007 | } |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1008 | |
| 1009 | /** |
| 1010 | * @brief Used to do the final store in the destination as per bytecode semantics. |
| 1011 | * @param rl_dest The destination dalvik register location. |
| 1012 | * @param rl_src The source register location. Can be either physical register or dalvik register. |
| 1013 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1014 | virtual void StoreValue(RegLocation rl_dest, RegLocation rl_src); |
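    /*
     * Illustrative sketch (not part of the original header): the canonical codegen
     * pattern loads operands into registers, computes into a result location, then
     * calls StoreValue to honor the bytecode's move semantics. EvalLoc and the sample
     * add are assumptions; the snippet is hypothetical.
     *
     *   rl_src = LoadValue(rl_src, kCoreReg);
     *   RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
     *   OpRegRegImm(kOpAdd, rl_result.reg, rl_src.reg, 1);  // Example computation.
     *   StoreValue(rl_dest, rl_result);
     */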
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1015 | |
| 1016 | /** |
| 1017 | * @brief Used to do the final store in a wide destination as per bytecode semantics. |
| 1018 | * @see StoreValue |
| 1019 | * @param rl_dest The destination dalvik register location. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1020 | * @param rl_src The source register location. Can be either physical register or dalvik |
| 1021 | * register. |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1022 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1023 | virtual void StoreValueWide(RegLocation rl_dest, RegLocation rl_src); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1024 | |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1025 | /** |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 1026 | * @brief Used to do the final store to a destination as per bytecode semantics. |
| 1027 | * @see StoreValue |
| 1028 | * @param rl_dest The destination dalvik register location. |
| 1029 | * @param rl_src The source register location. It must be kLocPhysReg |
| 1030 | * |
| 1031 | * This is used for x86 two operand computations, where we have computed the correct |
| 1032 | * register value that now needs to be properly registered. This is used to avoid an |
| 1033 | * extra register copy that would result if StoreValue was called. |
| 1034 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1035 | virtual void StoreFinalValue(RegLocation rl_dest, RegLocation rl_src); |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 1036 | |
| 1037 | /** |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1038 | * @brief Used to do the final store in a wide destination as per bytecode semantics. |
| 1039 | * @see StoreValueWide |
| 1040 | * @param rl_dest The destination dalvik register location. |
| 1041 | * @param rl_src The source register location. It must be kLocPhysReg |
| 1042 | * |
| 1043 | * This is used for x86 two operand computations, where we have computed the correct |
| 1044 | * register values that now need to be properly registered. This is used to avoid an |
| 1045 | * extra pair of register copies that would result if StoreValueWide was called. |
| 1046 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1047 | virtual void StoreFinalValueWide(RegLocation rl_dest, RegLocation rl_src); |
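    /*
     * Illustrative sketch (not part of the original header): on a two-operand target the
     * left operand can be forced into temps, updated in place, and then registered with
     * StoreFinalValueWide so no extra pair of copies is emitted. rl_src2_lo/rl_src2_hi
     * stand for the already-loaded halves of the right operand and are hypothetical.
     *
     *   RegLocation rl_result = ForceTempWide(LoadValueWide(rl_src1, kCoreReg));
     *   OpRegReg(kOpAdd, rl_result.reg.GetLow(), rl_src2_lo);   // Destructive low add.
     *   OpRegReg(kOpAdc, rl_result.reg.GetHigh(), rl_src2_hi);  // Add-with-carry high.
     *   StoreFinalValueWide(rl_dest, rl_result);
     */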
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1048 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1049 | // Shared by all targets - implemented in mir_to_lir.cc. |
| 1050 | void CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1051 | virtual void HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1052 | bool MethodBlockCodeGen(BasicBlock* bb); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1053 | bool SpecialMIR2LIR(const InlineMethod& special); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1054 | virtual void MethodMIR2LIR(); |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1055 | // Update LIR for verbose listings. |
| 1056 | void UpdateLIROffsets(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1057 | |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1058 | /* |
| 1059 | * @brief Load the address of the dex method into the register. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1060 | * @param target_method The MethodReference of the method to be invoked. |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1061 | * @param type How the method will be invoked. |
 | 1062 |      * @param symbolic_reg The register that will contain the code address. |
 | 1063 |      * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1064 | */ |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1065 | void LoadCodeAddress(const MethodReference& target_method, InvokeType type, |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1066 | SpecialTargetRegister symbolic_reg); |
| 1067 | |
| 1068 | /* |
| 1069 | * @brief Load the Method* of a dex method into the register. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1070 | * @param target_method The MethodReference of the method to be invoked. |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1071 | * @param type How the method will be invoked. |
 | 1072 |      * @param symbolic_reg The register that will contain the Method* address. |
 | 1073 |      * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1074 | */ |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1075 | virtual void LoadMethodAddress(const MethodReference& target_method, InvokeType type, |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1076 | SpecialTargetRegister symbolic_reg); |
| 1077 | |
| 1078 | /* |
| 1079 | * @brief Load the Class* of a Dex Class type into the register. |
 | 1080 |      * @param type_idx The dex index of the class type to load. |
 | 1081 |      * @param symbolic_reg The register that will contain the Class* pointer. |
 | 1082 |      * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1083 | */ |
| 1084 | virtual void LoadClassType(uint32_t type_idx, SpecialTargetRegister symbolic_reg); |
| 1085 | |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1086 |     // Routines that work for the generic case, but may be overridden by target. |
| 1087 | /* |
| 1088 | * @brief Compare memory to immediate, and branch if condition true. |
| 1089 | * @param cond The condition code that when true will branch to the target. |
| 1090 | * @param temp_reg A temporary register that can be used if compare to memory is not |
| 1091 | * supported by the architecture. |
| 1092 | * @param base_reg The register holding the base address. |
| 1093 | * @param offset The offset from the base. |
| 1094 | * @param check_value The immediate to compare to. |
| 1095 | * @returns The branch instruction that was generated. |
| 1096 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1097 | virtual LIR* OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg, |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1098 | int offset, int check_value, LIR* target); |
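    /*
     * Illustrative sketch (not part of the original header): a plausible generic fallback
     * for targets without a direct compare-memory-to-immediate form loads through the
     * supplied temp and branches on a register compare. The body is hypothetical.
     *
     *   LIR* Mir2Lir::OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg,
     *                                   RegStorage base_reg, int offset, int check_value,
     *                                   LIR* target) {
     *     Load32Disp(base_reg, offset, temp_reg);
     *     return OpCmpImmBranch(cond, temp_reg, check_value, target);
     *   }
     */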
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1099 | |
| 1100 | // Required for target - codegen helpers. |
buzbee | 11b63d1 | 2013-08-27 07:34:17 -0700 | [diff] [blame] | 1101 | virtual bool SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1102 | RegLocation rl_src, RegLocation rl_dest, int lit) = 0; |
Ian Rogers | e2143c0 | 2014-03-28 08:47:16 -0700 | [diff] [blame] | 1103 | virtual bool EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) = 0; |
Dave Allison | b373e09 | 2014-02-20 16:06:36 -0800 | [diff] [blame] | 1104 | virtual LIR* CheckSuspendUsingLoad() = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1105 | |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1106 | virtual RegStorage LoadHelper(ThreadOffset<4> offset) = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1107 | virtual RegStorage LoadHelper(ThreadOffset<8> offset) = 0; |
| 1108 | |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 1109 | virtual LIR* LoadBaseDispVolatile(RegStorage r_base, int displacement, RegStorage r_dest, |
| 1110 | OpSize size) = 0; |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 1111 | virtual LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest, |
| 1112 | OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1113 | virtual LIR* LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, |
| 1114 | int scale, OpSize size) = 0; |
| 1115 | virtual LIR* LoadBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 1116 | int displacement, RegStorage r_dest, OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1117 | virtual LIR* LoadConstantNoClobber(RegStorage r_dest, int value) = 0; |
| 1118 | virtual LIR* LoadConstantWide(RegStorage r_dest, int64_t value) = 0; |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 1119 | virtual LIR* StoreBaseDispVolatile(RegStorage r_base, int displacement, RegStorage r_src, |
| 1120 | OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1121 | virtual LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src, |
| 1122 | OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1123 | virtual LIR* StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, |
| 1124 | int scale, OpSize size) = 0; |
| 1125 | virtual LIR* StoreBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 1126 | int displacement, RegStorage r_src, OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1127 | virtual void MarkGCCard(RegStorage val_reg, RegStorage tgt_addr_reg) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1128 | |
| 1129 | // Required for target - register utilities. |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1130 | virtual RegStorage TargetReg(SpecialTargetRegister reg) = 0; |
| 1131 | virtual RegStorage GetArgMappingToPhysicalReg(int arg_num) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1132 | virtual RegLocation GetReturnAlt() = 0; |
| 1133 | virtual RegLocation GetReturnWideAlt() = 0; |
| 1134 | virtual RegLocation LocCReturn() = 0; |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 1135 | virtual RegLocation LocCReturnRef() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1136 | virtual RegLocation LocCReturnDouble() = 0; |
| 1137 | virtual RegLocation LocCReturnFloat() = 0; |
| 1138 | virtual RegLocation LocCReturnWide() = 0; |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1139 | virtual uint64_t GetRegMaskCommon(RegStorage reg) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1140 | virtual void AdjustSpillMask() = 0; |
Vladimir Marko | 31c2aac | 2013-12-09 16:31:19 +0000 | [diff] [blame] | 1141 | virtual void ClobberCallerSave() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1142 | virtual void FreeCallTemps() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1143 | virtual void LockCallTemps() = 0; |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1144 | virtual void MarkPreservedSingle(int v_reg, RegStorage reg) = 0; |
| 1145 | virtual void MarkPreservedDouble(int v_reg, RegStorage reg) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1146 | virtual void CompilerInitializeRegAlloc() = 0; |
| 1147 | |
| 1148 | // Required for target - miscellaneous. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1149 | virtual void AssembleLIR() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1150 | virtual void DumpResourceMask(LIR* lir, uint64_t mask, const char* prefix) = 0; |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1151 | virtual void SetupTargetResourceMasks(LIR* lir, uint64_t flags) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1152 | virtual const char* GetTargetInstFmt(int opcode) = 0; |
| 1153 | virtual const char* GetTargetInstName(int opcode) = 0; |
| 1154 | virtual std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) = 0; |
| 1155 | virtual uint64_t GetPCUseDefEncoding() = 0; |
| 1156 | virtual uint64_t GetTargetInstFlags(int opcode) = 0; |
| 1157 | virtual int GetInsnSize(LIR* lir) = 0; |
| 1158 | virtual bool IsUnconditionalBranch(LIR* lir) = 0; |
| 1159 | |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 1160 | // Check support for volatile load/store of a given size. |
| 1161 | virtual bool SupportsVolatileLoadStore(OpSize size) = 0; |
| 1162 | // Get the register class for load/store of a field. |
| 1163 | virtual RegisterClass RegClassForFieldLoadStore(OpSize size, bool is_volatile) = 0; |
| 1164 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1165 | // Required for target - Dalvik-level generators. |
| 1166 | virtual void GenArithImmOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 1167 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1168 | virtual void GenMulLong(Instruction::Code, |
| 1169 | RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1170 | RegLocation rl_src2) = 0; |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1171 | virtual void GenAddLong(Instruction::Code, |
| 1172 | RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1173 | RegLocation rl_src2) = 0; |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1174 | virtual void GenAndLong(Instruction::Code, |
| 1175 | RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1176 | RegLocation rl_src2) = 0; |
| 1177 | virtual void GenArithOpDouble(Instruction::Code opcode, |
| 1178 | RegLocation rl_dest, RegLocation rl_src1, |
| 1179 | RegLocation rl_src2) = 0; |
| 1180 | virtual void GenArithOpFloat(Instruction::Code opcode, RegLocation rl_dest, |
| 1181 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
| 1182 | virtual void GenCmpFP(Instruction::Code opcode, RegLocation rl_dest, |
| 1183 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
| 1184 | virtual void GenConversion(Instruction::Code opcode, RegLocation rl_dest, |
| 1185 | RegLocation rl_src) = 0; |
Vladimir Marko | 1c282e2 | 2013-11-21 14:49:47 +0000 | [diff] [blame] | 1186 | virtual bool GenInlinedCas(CallInfo* info, bool is_long, bool is_object) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1187 | |
| 1188 | /** |
| 1189 | * @brief Used to generate code for intrinsic java\.lang\.Math methods min and max. |
| 1190 | * @details This is also applicable for java\.lang\.StrictMath since it is a simple algorithm |
| 1191 | * that applies on integers. The generated code will write the smallest or largest value |
| 1192 | * directly into the destination register as specified by the invoke information. |
| 1193 | * @param info Information about the invoke. |
| 1194 | * @param is_min If true generates code that computes minimum. Otherwise computes maximum. |
| 1195 | * @return Returns true if successfully generated |
| 1196 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1197 | virtual bool GenInlinedMinMaxInt(CallInfo* info, bool is_min) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1198 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1199 | virtual bool GenInlinedSqrt(CallInfo* info) = 0; |
Vladimir Marko | e508a20 | 2013-11-04 15:24:22 +0000 | [diff] [blame] | 1200 | virtual bool GenInlinedPeek(CallInfo* info, OpSize size) = 0; |
| 1201 | virtual bool GenInlinedPoke(CallInfo* info, OpSize size) = 0; |
Serban Constantinescu | ed65c5e | 2014-05-22 15:10:18 +0100 | [diff] [blame] | 1202 | virtual void GenNotLong(RegLocation rl_dest, RegLocation rl_src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1203 | virtual void GenNegLong(RegLocation rl_dest, RegLocation rl_src) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1204 | virtual void GenOrLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1205 | RegLocation rl_src2) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1206 | virtual void GenSubLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1207 | RegLocation rl_src2) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1208 | virtual void GenXorLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1209 | RegLocation rl_src2) = 0; |
Serban Constantinescu | ed65c5e | 2014-05-22 15:10:18 +0100 | [diff] [blame] | 1210 | virtual void GenDivRemLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
| 1211 | RegLocation rl_src2, bool is_div) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1212 | virtual RegLocation GenDivRem(RegLocation rl_dest, RegStorage reg_lo, RegStorage reg_hi, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1213 | bool is_div) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1214 | virtual RegLocation GenDivRemLit(RegLocation rl_dest, RegStorage reg_lo, int lit, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1215 | bool is_div) = 0; |
Mark Mendell | 2bf31e6 | 2014-01-23 12:13:40 -0800 | [diff] [blame] | 1216 | /* |
 | 1217 |      * @brief Generate an integer div or rem operation. |
| 1218 | * @param rl_dest Destination Location. |
| 1219 | * @param rl_src1 Numerator Location. |
| 1220 | * @param rl_src2 Divisor Location. |
| 1221 | * @param is_div 'true' if this is a division, 'false' for a remainder. |
| 1222 | * @param check_zero 'true' if an exception should be generated if the divisor is 0. |
| 1223 | */ |
| 1224 | virtual RegLocation GenDivRem(RegLocation rl_dest, RegLocation rl_src1, |
| 1225 | RegLocation rl_src2, bool is_div, bool check_zero) = 0; |
| 1226 | /* |
| 1227 | * @brief Generate an integer div or rem operation by a literal. |
| 1228 | * @param rl_dest Destination Location. |
| 1229 | * @param rl_src Numerator Location. |
| 1230 | * @param lit Divisor. |
| 1231 | * @param is_div 'true' if this is a division, 'false' for a remainder. |
| 1232 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1233 | virtual RegLocation GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit, |
| 1234 | bool is_div) = 0; |
| 1235 | virtual void GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) = 0; |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1236 | |
| 1237 | /** |
| 1238 | * @brief Used for generating code that throws ArithmeticException if both registers are zero. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1239 | * @details This is used for generating DivideByZero checks when divisor is held in two |
| 1240 | * separate registers. |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 1241 | * @param reg The register holding the pair of 32-bit values. |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1242 | */ |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 1243 | virtual void GenDivZeroCheckWide(RegStorage reg) = 0; |
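    /*
     * Illustrative sketch (not part of the original header): one plausible implementation
     * ORs the two halves together and takes the divide-by-zero slow path when the result
     * is zero. The body is hypothetical for an arbitrary target.
     *
     *   void SomeTargetMir2Lir::GenDivZeroCheckWide(RegStorage reg) {
     *     RegStorage t_reg = AllocTemp();
     *     OpRegRegReg(kOpOr, t_reg, reg.GetLow(), reg.GetHigh());
     *     LIR* branch = OpCmpImmBranch(kCondEq, t_reg, 0, nullptr);
     *     FreeTemp(t_reg);
     *     AddDivZeroCheckSlowPath(branch);
     *   }
     */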
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1244 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1245 | virtual void GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1246 | virtual void GenExitSequence() = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1247 | virtual void GenFillArrayData(DexOffset table_offset, RegLocation rl_src) = 0; |
| 1248 | virtual void GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias, bool is_double) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1249 | virtual void GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir) = 0; |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1250 | |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1251 | /* |
| 1252 | * @brief Handle Machine Specific MIR Extended opcodes. |
| 1253 | * @param bb The basic block in which the MIR is from. |
 | 1254 |      * @param mir The MIR whose extended opcode is target-specific (not a standard extended opcode). |
| 1255 | * @note Base class implementation will abort for unknown opcodes. |
| 1256 | */ |
| 1257 | virtual void GenMachineSpecificExtendedMethodMIR(BasicBlock* bb, MIR* mir); |
| 1258 | |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1259 | /** |
| 1260 | * @brief Lowers the kMirOpSelect MIR into LIR. |
| 1261 | * @param bb The basic block in which the MIR is from. |
| 1262 | * @param mir The MIR whose opcode is kMirOpSelect. |
| 1263 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1264 | virtual void GenSelect(BasicBlock* bb, MIR* mir) = 0; |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1265 | |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1266 | /** |
| 1267 | * @brief Used to generate a memory barrier in an architecture specific way. |
 | 1268 |      * @details The last generated LIR will be considered for use as the barrier. Namely, |
 | 1269 |      * if the last LIR can be updated so that it also provides the barrier semantics, |
 | 1270 |      * it will be used as such. Otherwise, a new LIR that provides the required |
 | 1271 |      * semantics will be generated. |
| 1272 | * @param barrier_kind The kind of memory barrier to generate. |
Andreas Gampe | b14329f | 2014-05-15 11:16:06 -0700 | [diff] [blame] | 1273 | * @return whether a new instruction was generated. |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1274 | */ |
Andreas Gampe | b14329f | 2014-05-15 11:16:06 -0700 | [diff] [blame] | 1275 | virtual bool GenMemBarrier(MemBarrierKind barrier_kind) = 0; |
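    /*
     * Illustrative usage sketch (not part of the original header): a volatile store is
     * typically bracketed by barriers, letting the target either patch its last LIR or
     * emit a new one. The barrier kind names assume the MemBarrierKind enum of this era.
     *
     *   GenMemBarrier(kStoreStore);                                // Before the store.
     *   StoreBaseDispVolatile(r_base, displacement, r_src, size);
     *   GenMemBarrier(kStoreLoad);                                 // After the store.
     */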
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1276 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1277 | virtual void GenMoveException(RegLocation rl_dest) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1278 | virtual void GenMultiplyByTwoBitMultiplier(RegLocation rl_src, RegLocation rl_result, int lit, |
| 1279 | int first_bit, int second_bit) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1280 | virtual void GenNegDouble(RegLocation rl_dest, RegLocation rl_src) = 0; |
| 1281 | virtual void GenNegFloat(RegLocation rl_dest, RegLocation rl_src) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1282 | virtual void GenPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) = 0; |
| 1283 | virtual void GenSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1284 | virtual void GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array, |
| 1285 | RegLocation rl_index, RegLocation rl_dest, int scale) = 0; |
| 1286 | virtual void GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array, |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 1287 | RegLocation rl_index, RegLocation rl_src, int scale, |
| 1288 | bool card_mark) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1289 | virtual void GenShiftImmOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 1290 | RegLocation rl_src1, RegLocation rl_shift) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1291 | |
| 1292 | // Required for target - single operation generators. |
| 1293 | virtual LIR* OpUnconditionalBranch(LIR* target) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1294 | virtual LIR* OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target) = 0; |
| 1295 | virtual LIR* OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, |
| 1296 | LIR* target) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1297 | virtual LIR* OpCondBranch(ConditionCode cc, LIR* target) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1298 | virtual LIR* OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target) = 0; |
| 1299 | virtual LIR* OpFpRegCopy(RegStorage r_dest, RegStorage r_src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1300 | virtual LIR* OpIT(ConditionCode cond, const char* guide) = 0; |
Dave Allison | 3da67a5 | 2014-04-02 17:03:45 -0700 | [diff] [blame] | 1301 | virtual void OpEndIT(LIR* it) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1302 | virtual LIR* OpMem(OpKind op, RegStorage r_base, int disp) = 0; |
| 1303 | virtual LIR* OpPcRelLoad(RegStorage reg, LIR* target) = 0; |
| 1304 | virtual LIR* OpReg(OpKind op, RegStorage r_dest_src) = 0; |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1305 | virtual void OpRegCopy(RegStorage r_dest, RegStorage r_src) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1306 | virtual LIR* OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src) = 0; |
| 1307 | virtual LIR* OpRegImm(OpKind op, RegStorage r_dest_src1, int value) = 0; |
| 1308 | virtual LIR* OpRegMem(OpKind op, RegStorage r_dest, RegStorage r_base, int offset) = 0; |
| 1309 | virtual LIR* OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1310 | |
| 1311 | /** |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1312 | * @brief Used to generate an LIR that does a load from mem to reg. |
| 1313 | * @param r_dest The destination physical register. |
| 1314 | * @param r_base The base physical register for memory operand. |
| 1315 | * @param offset The displacement for memory operand. |
| 1316 | * @param move_type Specification on the move desired (size, alignment, register kind). |
 | 1317 |      * @return Returns the generated move LIR. |
| 1318 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1319 | virtual LIR* OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, |
| 1320 | MoveType move_type) = 0; |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1321 | |
| 1322 | /** |
| 1323 | * @brief Used to generate an LIR that does a store from reg to mem. |
| 1324 | * @param r_base The base physical register for memory operand. |
| 1325 | * @param offset The displacement for memory operand. |
 | 1326 |      * @param r_src The source physical register. |
 | 1327 |      * @param move_type Specification on the move desired (size, alignment, |
 | 1328 |      * register kind). |
 | 1329 |      * @return Returns the generated move LIR. |
| 1330 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1331 | virtual LIR* OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, |
| 1332 | MoveType move_type) = 0; |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1333 | |
| 1334 | /** |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1335 | * @brief Used for generating a conditional register to register operation. |
| 1336 | * @param op The opcode kind. |
| 1337 | * @param cc The condition code that when true will perform the opcode. |
| 1338 | * @param r_dest The destination physical register. |
| 1339 | * @param r_src The source physical register. |
| 1340 | * @return Returns the newly created LIR or null in case of creation failure. |
| 1341 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1342 | virtual LIR* OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1343 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1344 | virtual LIR* OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value) = 0; |
| 1345 | virtual LIR* OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, |
| 1346 | RegStorage r_src2) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1347 | virtual LIR* OpTestSuspend(LIR* target) = 0; |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1348 | virtual LIR* OpThreadMem(OpKind op, ThreadOffset<4> thread_offset) = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1349 | virtual LIR* OpThreadMem(OpKind op, ThreadOffset<8> thread_offset) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1350 | virtual LIR* OpVldm(RegStorage r_base, int count) = 0; |
| 1351 | virtual LIR* OpVstm(RegStorage r_base, int count) = 0; |
| 1352 | virtual void OpLea(RegStorage r_base, RegStorage reg1, RegStorage reg2, int scale, |
| 1353 | int offset) = 0; |
| 1354 | virtual void OpRegCopyWide(RegStorage dest, RegStorage src) = 0; |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1355 | virtual void OpTlsCmp(ThreadOffset<4> offset, int val) = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1356 | virtual void OpTlsCmp(ThreadOffset<8> offset, int val) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1357 | virtual bool InexpensiveConstantInt(int32_t value) = 0; |
| 1358 | virtual bool InexpensiveConstantFloat(int32_t value) = 0; |
| 1359 | virtual bool InexpensiveConstantLong(int64_t value) = 0; |
| 1360 | virtual bool InexpensiveConstantDouble(int64_t value) = 0; |
| 1361 | |
Ian Rogers | d9c4fc9 | 2013-10-01 19:45:43 -0700 | [diff] [blame] | 1362 | // May be optimized by targets. |
| 1363 | virtual void GenMonitorEnter(int opt_flags, RegLocation rl_src); |
| 1364 | virtual void GenMonitorExit(int opt_flags, RegLocation rl_src); |
| 1365 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1366 | // Temp workaround |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1367 | void Workaround7250540(RegLocation rl_dest, RegStorage zero_reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1368 | |
| 1369 | protected: |
| 1370 | Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena); |
| 1371 | |
| 1372 | CompilationUnit* GetCompilationUnit() { |
| 1373 | return cu_; |
| 1374 | } |
Mark Mendell | 4708dcd | 2014-01-22 09:05:18 -0800 | [diff] [blame] | 1375 | /* |
| 1376 | * @brief Returns the index of the lowest set bit in 'x'. |
| 1377 | * @param x Value to be examined. |
| 1378 | * @returns The bit number of the lowest bit set in the value. |
| 1379 | */ |
| 1380 | int32_t LowestSetBit(uint64_t x); |
| 1381 | /* |
| 1382 | * @brief Is this value a power of two? |
| 1383 | * @param x Value to be examined. |
| 1384 | * @returns 'true' if only 1 bit is set in the value. |
| 1385 | */ |
| 1386 | bool IsPowerOfTwo(uint64_t x); |
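    /*
     * Illustrative sketch (not part of the original header): both helpers reduce to
     * standard bit tricks; a plausible implementation is shown for reference.
     *
     *   int32_t Mir2Lir::LowestSetBit(uint64_t x) {
     *     int bit_posn = 0;           // Assumes x != 0.
     *     while ((x & 0x1) == 0) {
     *       bit_posn++;
     *       x >>= 1;
     *     }
     *     return bit_posn;
     *   }
     *
     *   bool Mir2Lir::IsPowerOfTwo(uint64_t x) {
     *     return (x & (x - 1)) == 0;  // Note: also true for x == 0.
     *   }
     */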
| 1387 | /* |
| 1388 | * @brief Do these SRs overlap? |
| 1389 | * @param rl_op1 One RegLocation |
| 1390 | * @param rl_op2 The other RegLocation |
| 1391 | * @return 'true' if the VR pairs overlap |
| 1392 | * |
| 1393 | * Check to see if a result pair has a misaligned overlap with an operand pair. This |
| 1394 | * is not usual for dx to generate, but it is legal (for now). In a future rev of |
| 1395 | * dex, we'll want to make this case illegal. |
| 1396 | */ |
| 1397 | bool BadOverlap(RegLocation rl_op1, RegLocation rl_op2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1398 | |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1399 | /* |
| 1400 | * @brief Force a location (in a register) into a temporary register |
 | 1401 |      * @param loc Location of the result. |
 | 1402 |      * @returns The updated location. |
| 1403 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1404 | virtual RegLocation ForceTemp(RegLocation loc); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1405 | |
| 1406 | /* |
| 1407 | * @brief Force a wide location (in registers) into temporary registers |
 | 1408 |      * @param loc Location of the result. |
 | 1409 |      * @returns The updated location. |
| 1410 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1411 | virtual RegLocation ForceTempWide(RegLocation loc); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1412 | |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 1413 | static constexpr OpSize LoadStoreOpSize(bool wide, bool ref) { |
| 1414 | return wide ? k64 : ref ? kReference : k32; |
| 1415 | } |
| 1416 | |
Mark Mendell | df8ee2e | 2014-01-27 16:37:47 -0800 | [diff] [blame] | 1417 | virtual void GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, |
| 1418 | RegLocation rl_dest, RegLocation rl_src); |
| 1419 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1420 | void AddSlowPath(LIRSlowPath* slowpath); |
| 1421 | |
Mark Mendell | 6607d97 | 2014-02-10 06:54:18 -0800 | [diff] [blame] | 1422 | virtual void GenInstanceofCallingHelper(bool needs_access_check, bool type_known_final, |
| 1423 | bool type_known_abstract, bool use_declaring_class, |
| 1424 | bool can_assume_type_is_in_dex_cache, |
| 1425 | uint32_t type_idx, RegLocation rl_dest, |
| 1426 | RegLocation rl_src); |
Mark Mendell | ae9fd93 | 2014-02-10 16:14:35 -0800 | [diff] [blame] | 1427 | /* |
| 1428 | * @brief Generate the debug_frame FDE information if possible. |
 | 1429 |      * @returns Pointer to a vector containing the CFI information, or NULL. |
| 1430 | */ |
| 1431 | virtual std::vector<uint8_t>* ReturnCallFrameInformation(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1432 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1433 | /** |
| 1434 | * @brief Used to insert marker that can be used to associate MIR with LIR. |
| 1435 | * @details Only inserts marker if verbosity is enabled. |
| 1436 | * @param mir The mir that is currently being generated. |
| 1437 | */ |
| 1438 | void GenPrintLabel(MIR* mir); |
| 1439 | |
| 1440 | /** |
| 1441 | * @brief Used to generate return sequence when there is no frame. |
| 1442 | * @details Assumes that the return registers have already been populated. |
| 1443 | */ |
| 1444 | virtual void GenSpecialExitSequence() = 0; |
| 1445 | |
| 1446 | /** |
| 1447 | * @brief Used to generate code for special methods that are known to be |
| 1448 | * small enough to work in frameless mode. |
| 1449 | * @param bb The basic block of the first MIR. |
| 1450 | * @param mir The first MIR of the special method. |
| 1451 | * @param special Information about the special method. |
| 1452 | * @return Returns whether or not this was handled successfully. Returns false |
| 1453 | * if caller should punt to normal MIR2LIR conversion. |
| 1454 | */ |
| 1455 | virtual bool GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special); |
| 1456 | |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1457 | protected: |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1458 | void ClobberBody(RegisterInfo* p); |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1459 | void SetCurrentDexPc(DexOffset dexpc) { |
| 1460 | current_dalvik_offset_ = dexpc; |
| 1461 | } |
| 1462 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1463 | /** |
| 1464 | * @brief Used to lock register if argument at in_position was passed that way. |
| 1465 | * @details Does nothing if the argument is passed via stack. |
| 1466 | * @param in_position The argument number whose register to lock. |
| 1467 | * @param wide Whether the argument is wide. |
| 1468 | */ |
| 1469 | void LockArg(int in_position, bool wide = false); |
| 1470 | |
| 1471 | /** |
| 1472 | * @brief Used to load VR argument to a physical register. |
| 1473 | * @details The load is only done if the argument is not already in physical register. |
| 1474 | * LockArg must have been previously called. |
| 1475 | * @param in_position The argument number to load. |
| 1476 | * @param wide Whether the argument is 64-bit or not. |
| 1477 | * @return Returns the register (or register pair) for the loaded argument. |
| 1478 | */ |
Vladimir Marko | c93ac8b | 2014-05-13 17:53:49 +0100 | [diff] [blame] | 1479 | RegStorage LoadArg(int in_position, RegisterClass reg_class, bool wide = false); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1480 | |
| 1481 | /** |
| 1482 | * @brief Used to load a VR argument directly to a specified register location. |
| 1483 | * @param in_position The argument number to place in register. |
| 1484 | * @param rl_dest The register location where to place argument. |
| 1485 | */ |
| 1486 | void LoadArgDirect(int in_position, RegLocation rl_dest); |
| 1487 | |
| 1488 | /** |
| 1489 | * @brief Used to generate LIR for special getter method. |
| 1490 | * @param mir The mir that represents the iget. |
| 1491 | * @param special Information about the special getter method. |
| 1492 | * @return Returns whether LIR was successfully generated. |
| 1493 | */ |
| 1494 | bool GenSpecialIGet(MIR* mir, const InlineMethod& special); |
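    /*
     * Illustrative sketch (not part of the original header): a frameless getter locks and
     * loads the incoming 'this' argument, null-checks it, and loads the field into the
     * return register. field_offset is hypothetical and kRefReg assumes the reference
     * register class of this era; the caller emits the special exit sequence.
     *
     *   bool Mir2Lir::GenSpecialIGetSketch(MIR* mir, const InlineMethod& special) {
     *     LockArg(0);                                  // 'this' is argument position 0.
     *     RegStorage reg_obj = LoadArg(0, kRefReg);
     *     GenNullCheck(reg_obj, mir->optimization_flags);
     *     Load32Disp(reg_obj, field_offset, TargetReg(kRet0));
     *     return true;
     *   }
     */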
| 1495 | |
| 1496 | /** |
| 1497 | * @brief Used to generate LIR for special setter method. |
| 1498 | * @param mir The mir that represents the iput. |
| 1499 | * @param special Information about the special setter method. |
| 1500 | * @return Returns whether LIR was successfully generated. |
| 1501 | */ |
| 1502 | bool GenSpecialIPut(MIR* mir, const InlineMethod& special); |
| 1503 | |
| 1504 | /** |
| 1505 | * @brief Used to generate LIR for special return-args method. |
| 1506 | * @param mir The mir that represents the return of argument. |
| 1507 | * @param special Information about the special return-args method. |
| 1508 | * @return Returns whether LIR was successfully generated. |
| 1509 | */ |
| 1510 | bool GenSpecialIdentity(MIR* mir, const InlineMethod& special); |
| 1511 | |
Mingyao Yang | 4289456 | 2014-04-07 12:42:16 -0700 | [diff] [blame] | 1512 | void AddDivZeroCheckSlowPath(LIR* branch); |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1513 | |
Mingyao Yang | 80365d9 | 2014-04-18 12:10:58 -0700 | [diff] [blame] | 1514 | // Copy arg0 and arg1 to kArg0 and kArg1 safely, possibly using |
| 1515 | // kArg2 as temp. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1516 | virtual void CopyToArgumentRegs(RegStorage arg0, RegStorage arg1); |
| 1517 | |
| 1518 | /** |
| 1519 | * @brief Load Constant into RegLocation |
| 1520 | * @param rl_dest Destination RegLocation |
| 1521 | * @param value Constant value |
| 1522 | */ |
| 1523 | virtual void GenConst(RegLocation rl_dest, int value); |
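    /*
     * Illustrative use, with rl_dest standing in for a destination obtained from the usual
     * RegLocation machinery:
     *
     * @code
     *   GenConst(rl_dest, 42);   // emit the load-constant LIR and store back as needed
     * @endcode
     */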
Mingyao Yang | 80365d9 | 2014-04-18 12:10:58 -0700 | [diff] [blame] | 1524 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1525 | public: |
| 1526 | // TODO: add accessors for these. |
| 1527 | LIR* literal_list_; // Constants. |
| 1528 | LIR* method_literal_list_; // Method literals requiring patching. |
Hiroshi Yamauchi | be1ca55 | 2014-01-15 11:46:48 -0800 | [diff] [blame] | 1529 | LIR* class_literal_list_; // Class literals requiring patching. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1530 | LIR* code_literal_list_; // Code literals requiring patching. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1531 | LIR* first_fixup_; // Doubly-linked list of LIR nodes requiring fixups. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1532 | |
| 1533 | protected: |
| 1534 | CompilationUnit* const cu_; |
| 1535 | MIRGraph* const mir_graph_; |
| 1536 | GrowableArray<SwitchTable*> switch_tables_; |
| 1537 | GrowableArray<FillArrayData*> fill_array_data_; |
buzbee | bd663de | 2013-09-10 15:41:31 -0700 | [diff] [blame] | 1538 | GrowableArray<RegisterInfo*> tempreg_info_; |
| 1539 | GrowableArray<RegisterInfo*> reginfo_map_; |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1540 | GrowableArray<void*> pointer_storage_; |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1541 |     CodeOffset current_code_offset_;    // Working byte offset of machine instructions. |
 | 1542 |     CodeOffset data_offset_;            // Starting offset of the literal pool. |
 | 1543 |     size_t total_size_;                 // Header + code size. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1544 | LIR* block_label_list_; |
| 1545 | PromotionMap* promotion_map_; |
| 1546 | /* |
| 1547 | * TODO: The code generation utilities don't have a built-in |
| 1548 | * mechanism to propagate the original Dalvik opcode address to the |
| 1549 | * associated generated instructions. For the trace compiler, this wasn't |
| 1550 | * necessary because the interpreter handled all throws and debugging |
| 1551 | * requests. For now we'll handle this by placing the Dalvik offset |
 | 1552 |      * in current_dalvik_offset_ (below) before codegen for each instruction. |
 | 1553 |      * The low-level LIR creation utilities will pull it from here. Rework this. |
| 1554 | */ |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1555 | DexOffset current_dalvik_offset_; |
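    /*
     * Sketch of the interim pattern the comment above describes; the surrounding driver code
     * is assumed for illustration and CompileDalvikInstruction is used here as a hypothetical
     * per-instruction entry point:
     *
     * @code
     *   // Inside the per-MIR codegen loop:
     *   SetCurrentDexPc(mir->offset);                   // stash the Dalvik offset for the LIR utilities
     *   CompileDalvikInstruction(mir, bb, label_list);  // emitted LIR picks the offset up
     * @endcode
     */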
| 1556 | size_t estimated_native_code_size_; // Just an estimate; used to reserve code_buffer_ size. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1557 | RegisterPool* reg_pool_; |
| 1558 | /* |
 | 1559 |      * Sanity checking for the register temp tracking. The same ssa |
 | 1560 |      * name should never be associated with more than one live temp |
 | 1561 |      * register during the compilation of a single instruction. |
| 1562 | */ |
| 1563 | int live_sreg_; |
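    /*
     * Sketch of the invariant live_sreg_ supports; CheckLiveSReg is a hypothetical helper
     * written only to illustrate the check:
     *
     * @code
     *   void CheckLiveSReg(int s_reg) {
     *     DCHECK_NE(s_reg, live_sreg_);   // the ssa name must not already be live in another temp
     *     live_sreg_ = s_reg;
     *   }
     * @endcode
     */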
| 1564 | CodeBuffer code_buffer_; |
Ian Rogers | 96faf5b | 2013-08-09 22:05:32 -0700 | [diff] [blame] | 1565 | // The encoding mapping table data (dex -> pc offset and pc offset -> dex) with a size prefix. |
Vladimir Marko | 06606b9 | 2013-12-02 15:31:08 +0000 | [diff] [blame] | 1566 | std::vector<uint8_t> encoded_mapping_table_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1567 | std::vector<uint32_t> core_vmap_table_; |
| 1568 | std::vector<uint32_t> fp_vmap_table_; |
| 1569 | std::vector<uint8_t> native_gc_map_; |
| 1570 | int num_core_spills_; |
| 1571 | int num_fp_spills_; |
| 1572 | int frame_size_; |
| 1573 | unsigned int core_spill_mask_; |
| 1574 | unsigned int fp_spill_mask_; |
| 1575 | LIR* first_lir_insn_; |
| 1576 | LIR* last_lir_insn_; |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1577 | |
| 1578 | GrowableArray<LIRSlowPath*> slow_paths_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1579 | }; // Class Mir2Lir |
| 1580 | |
| 1581 | } // namespace art |
| 1582 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 1583 | #endif // ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_ |