/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_
#define ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_

#include "invoke_type.h"
#include "compiled_method.h"
#include "dex/compiler_enums.h"
#include "dex/compiler_ir.h"
#include "dex/reg_location.h"
#include "dex/reg_storage.h"
#include "dex/backend.h"
#include "dex/quick/resource_mask.h"
#include "driver/compiler_driver.h"
#include "instruction_set.h"
#include "leb128.h"
#include "safe_map.h"
#include "utils/array_ref.h"
#include "utils/arena_allocator.h"
#include "utils/growable_array.h"

namespace art {

/*
 * TODO: refactoring pass to move these (and other) typedefs towards usage style of runtime to
 * add type safety (see runtime/offsets.h).
 */
typedef uint32_t DexOffset;          // Dex offset in code units.
typedef uint16_t NarrowDexOffset;    // For use in structs, Dex offsets range from 0 .. 0xffff.
typedef uint32_t CodeOffset;         // Native code offset in bytes.

// Set to 1 to measure cost of suspend check.
#define NO_SUSPEND 0

#define IS_BINARY_OP (1ULL << kIsBinaryOp)
#define IS_BRANCH (1ULL << kIsBranch)
#define IS_IT (1ULL << kIsIT)
#define IS_LOAD (1ULL << kMemLoad)
#define IS_QUAD_OP (1ULL << kIsQuadOp)
#define IS_QUIN_OP (1ULL << kIsQuinOp)
#define IS_SEXTUPLE_OP (1ULL << kIsSextupleOp)
#define IS_STORE (1ULL << kMemStore)
#define IS_TERTIARY_OP (1ULL << kIsTertiaryOp)
#define IS_UNARY_OP (1ULL << kIsUnaryOp)
#define NEEDS_FIXUP (1ULL << kPCRelFixup)
#define NO_OPERAND (1ULL << kNoOperand)
#define REG_DEF0 (1ULL << kRegDef0)
#define REG_DEF1 (1ULL << kRegDef1)
#define REG_DEF2 (1ULL << kRegDef2)
#define REG_DEFA (1ULL << kRegDefA)
#define REG_DEFD (1ULL << kRegDefD)
#define REG_DEF_FPCS_LIST0 (1ULL << kRegDefFPCSList0)
#define REG_DEF_FPCS_LIST2 (1ULL << kRegDefFPCSList2)
#define REG_DEF_LIST0 (1ULL << kRegDefList0)
#define REG_DEF_LIST1 (1ULL << kRegDefList1)
#define REG_DEF_LR (1ULL << kRegDefLR)
#define REG_DEF_SP (1ULL << kRegDefSP)
#define REG_USE0 (1ULL << kRegUse0)
#define REG_USE1 (1ULL << kRegUse1)
#define REG_USE2 (1ULL << kRegUse2)
#define REG_USE3 (1ULL << kRegUse3)
#define REG_USE4 (1ULL << kRegUse4)
#define REG_USEA (1ULL << kRegUseA)
#define REG_USEC (1ULL << kRegUseC)
#define REG_USED (1ULL << kRegUseD)
#define REG_USEB (1ULL << kRegUseB)
#define REG_USE_FPCS_LIST0 (1ULL << kRegUseFPCSList0)
#define REG_USE_FPCS_LIST2 (1ULL << kRegUseFPCSList2)
#define REG_USE_LIST0 (1ULL << kRegUseList0)
#define REG_USE_LIST1 (1ULL << kRegUseList1)
#define REG_USE_LR (1ULL << kRegUseLR)
#define REG_USE_PC (1ULL << kRegUsePC)
#define REG_USE_SP (1ULL << kRegUseSP)
#define SETS_CCODES (1ULL << kSetsCCodes)
#define USES_CCODES (1ULL << kUsesCCodes)
#define USE_FP_STACK (1ULL << kUseFpStack)
#define REG_USE_LO (1ULL << kUseLo)
#define REG_USE_HI (1ULL << kUseHi)
#define REG_DEF_LO (1ULL << kDefLo)
#define REG_DEF_HI (1ULL << kDefHi)

// Common combo register usage patterns.
#define REG_DEF01 (REG_DEF0 | REG_DEF1)
#define REG_DEF012 (REG_DEF0 | REG_DEF1 | REG_DEF2)
#define REG_DEF01_USE2 (REG_DEF0 | REG_DEF1 | REG_USE2)
#define REG_DEF0_USE01 (REG_DEF0 | REG_USE01)
#define REG_DEF0_USE0 (REG_DEF0 | REG_USE0)
#define REG_DEF0_USE12 (REG_DEF0 | REG_USE12)
#define REG_DEF0_USE123 (REG_DEF0 | REG_USE123)
#define REG_DEF0_USE1 (REG_DEF0 | REG_USE1)
#define REG_DEF0_USE2 (REG_DEF0 | REG_USE2)
#define REG_DEFAD_USEAD (REG_DEFAD_USEA | REG_USED)
#define REG_DEFAD_USEA (REG_DEFA_USEA | REG_DEFD)
#define REG_DEFA_USEA (REG_DEFA | REG_USEA)
#define REG_USE012 (REG_USE01 | REG_USE2)
#define REG_USE014 (REG_USE01 | REG_USE4)
#define REG_USE01 (REG_USE0 | REG_USE1)
#define REG_USE02 (REG_USE0 | REG_USE2)
#define REG_USE12 (REG_USE1 | REG_USE2)
#define REG_USE23 (REG_USE2 | REG_USE3)
#define REG_USE123 (REG_USE1 | REG_USE2 | REG_USE3)

// TODO: #includes need a cleanup
#ifndef INVALID_SREG
#define INVALID_SREG (-1)
#endif

struct BasicBlock;
struct CallInfo;
struct CompilationUnit;
struct InlineMethod;
struct MIR;
struct LIR;
struct RegisterInfo;
class DexFileMethodInliner;
class MIRGraph;
class Mir2Lir;

typedef int (*NextCallInsn)(CompilationUnit*, CallInfo*, int,
                            const MethodReference& target_method,
                            uint32_t method_idx, uintptr_t direct_code,
                            uintptr_t direct_method, InvokeType type);

typedef std::vector<uint8_t> CodeBuffer;

struct UseDefMasks {
  const ResourceMask* use_mask;        // Resource mask for use.
  const ResourceMask* def_mask;        // Resource mask for def.
};

struct AssemblyInfo {
  LIR* pcrel_next;                     // Chain of LIR nodes needing pc relative fixups.
};

struct LIR {
  CodeOffset offset;               // Offset of this instruction.
  NarrowDexOffset dalvik_offset;   // Offset of Dalvik opcode in code units (16-bit words).
  int16_t opcode;
  LIR* next;
  LIR* prev;
  LIR* target;
  struct {
    unsigned int alias_info:17;    // For Dalvik register disambiguation.
    bool is_nop:1;                 // LIR is optimized away.
    unsigned int size:4;           // Note: size of encoded instruction is in bytes.
    bool use_def_invalid:1;        // If true, masks should not be used.
    unsigned int generation:1;     // Used to track visitation state during fixup pass.
    unsigned int fixup:8;          // Fixup kind.
  } flags;
  union {
    UseDefMasks m;                 // Use & Def masks used during optimization.
    AssemblyInfo a;                // Instruction info used during assembly phase.
  } u;
  int32_t operands[5];             // [0..4] = [dest, src1, src2, extra, extra2].
};

// Target-specific initialization.
Mir2Lir* ArmCodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                          ArenaAllocator* const arena);
Mir2Lir* Arm64CodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                            ArenaAllocator* const arena);
Mir2Lir* MipsCodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                           ArenaAllocator* const arena);
Mir2Lir* X86CodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph,
                          ArenaAllocator* const arena);

// Utility macros to traverse the LIR list.
#define NEXT_LIR(lir) (lir->next)
#define PREV_LIR(lir) (lir->prev)

// Defines for alias_info (tracks Dalvik register references).
#define DECODE_ALIAS_INFO_REG(X)        (X & 0xffff)
#define DECODE_ALIAS_INFO_WIDE_FLAG     (0x10000)
#define DECODE_ALIAS_INFO_WIDE(X)       ((X & DECODE_ALIAS_INFO_WIDE_FLAG) ? 1 : 0)
#define ENCODE_ALIAS_INFO(REG, ISWIDE)  (REG | (ISWIDE ? DECODE_ALIAS_INFO_WIDE_FLAG : 0))
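
// Worked example (a sketch, relying only on the macros above): Dalvik register v5 held as the
// low half of a wide value encodes and decodes as follows.
//   int info = ENCODE_ALIAS_INFO(5, true);    // info == 0x10005
//   int reg  = DECODE_ALIAS_INFO_REG(info);   // reg  == 5
//   int wide = DECODE_ALIAS_INFO_WIDE(info);  // wide == 1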

#define ENCODE_REG_PAIR(low_reg, high_reg) ((low_reg & 0xff) | ((high_reg & 0xff) << 8))
#define DECODE_REG_PAIR(both_regs, low_reg, high_reg) \
  do { \
    low_reg = both_regs & 0xff; \
    high_reg = (both_regs >> 8) & 0xff; \
  } while (false)
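
// Worked example (a sketch, using the pair macros above): packing registers 2 and 3 into one
// operand and unpacking them again.
//   int both = ENCODE_REG_PAIR(2, 3);   // both == 0x0302
//   int lo, hi;
//   DECODE_REG_PAIR(both, lo, hi);      // lo == 2, hi == 3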

// Mask to denote sreg as the start of a 64-bit item.  Must not interfere with low 16 bits.
#define STARTING_WIDE_SREG 0x10000

// TODO: replace these macros
#define SLOW_FIELD_PATH (cu_->enable_debug & (1 << kDebugSlowFieldPath))
#define SLOW_INVOKE_PATH (cu_->enable_debug & (1 << kDebugSlowInvokePath))
#define SLOW_STRING_PATH (cu_->enable_debug & (1 << kDebugSlowStringPath))
#define SLOW_TYPE_PATH (cu_->enable_debug & (1 << kDebugSlowTypePath))
#define EXERCISE_SLOWEST_STRING_PATH (cu_->enable_debug & (1 << kDebugSlowestStringPath))

// Size of a frame that we definitely consider large. Anything larger than this should
// definitely get a stack overflow check.
static constexpr size_t kLargeFrameSize = 2 * KB;

// Size of a frame that should be small. Any leaf method with a frame smaller than this should
// run without a stack overflow check.
// The constant is from experience with frameworks code.
static constexpr size_t kSmallFrameSize = 1 * KB;

// Determine whether a frame is small or large, used in the decision on whether to elide a
// stack overflow check on method entry.
//
// A frame is considered large when it's either above kLargeFrameSize, or a quarter of the
// overflow-usable stack space.
static constexpr bool IsLargeFrame(size_t size, InstructionSet isa) {
  return size >= kLargeFrameSize || size >= GetStackOverflowReservedBytes(isa) / 4;
}
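
// Worked example (a sketch; concrete GetStackOverflowReservedBytes() values are target-dependent
// and not assumed here):
//   IsLargeFrame(3 * KB, kArm)   // true: 3 KB already exceeds kLargeFrameSize (2 KB).
//   IsLargeFrame(1 * KB, kArm)   // depends only on GetStackOverflowReservedBytes(kArm) / 4;
//                                // the COMPILE_ASSERTs below require this to be false.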

// We want to ensure that on all systems kSmallFrameSize will lead to false in IsLargeFrame.
COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kArm),
               kSmallFrameSize_is_not_a_small_frame_arm);
COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kArm64),
               kSmallFrameSize_is_not_a_small_frame_arm64);
COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kMips),
               kSmallFrameSize_is_not_a_small_frame_mips);
COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kX86),
               kSmallFrameSize_is_not_a_small_frame_x86);
COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kX86_64),
               kSmallFrameSize_is_not_a_small_frame_x86_64);

class Mir2Lir : public Backend {
 public:
  static constexpr bool kFailOnSizeError = true && kIsDebugBuild;
  static constexpr bool kReportSizeError = true && kIsDebugBuild;

  /*
   * Auxiliary information describing the location of data embedded in the Dalvik
   * byte code stream.
   */
  struct EmbeddedData {
    CodeOffset offset;       // Code offset of data block.
    const uint16_t* table;   // Original dex data.
    DexOffset vaddr;         // Dalvik offset of parent opcode.
  };

  struct FillArrayData : EmbeddedData {
    int32_t size;
  };

  struct SwitchTable : EmbeddedData {
    LIR* anchor;             // Reference instruction for relative offsets.
    LIR** targets;           // Array of case targets.
  };

  /* Static register use counts */
  struct RefCounts {
    int count;
    int s_reg;
  };

  /*
   * Data structure tracking the mapping between a Dalvik value (32 or 64 bits)
   * and native register storage. The primary purpose is to reuse previously
   * loaded values, if possible, and otherwise to keep the value in register
   * storage as long as possible.
   *
   * NOTE 1: wide_value refers to the width of the Dalvik value contained in
   * this register (or pair). For example, a 64-bit register containing a 32-bit
   * Dalvik value would have wide_value==false even though the storage container itself
   * is wide. Similarly, a 32-bit register containing half of a 64-bit Dalvik value
   * would have wide_value==true (and additionally would have its partner field set to the
   * other half, whose wide_value field would also be true).
   *
   * NOTE 2: In the case of a register pair, you can determine which of the partners
   * is the low half by looking at the s_reg names. The high s_reg will equal low_sreg + 1.
   *
   * NOTE 3: In the case of a 64-bit register holding a Dalvik wide value, wide_value
   * will be true and partner==self. s_reg refers to the low-order word of the Dalvik
   * value, and the s_reg of the high word is implied (s_reg + 1).
   *
   * NOTE 4: The reg and is_temp fields should always be correct. If is_temp is false no
   * other fields have meaning. [perhaps not true, wide should work for promoted regs?]
   * If is_temp==true and live==false, no other fields have
   * meaning. If is_temp==true and live==true, wide_value, partner, dirty, s_reg, def_start
   * and def_end describe the relationship between the temp register/register pair and
   * the Dalvik value[s] described by s_reg/s_reg+1.
   *
   * The fields used_storage, master_storage and storage_mask are used to track allocation
   * in light of potential aliasing. For example, consider Arm's d2, which overlaps s4 & s5.
   * d2's storage mask would be 0x00000003, the two low-order bits denoting 64 bits of
   * storage use. For s4, it would be 0x00000001; for s5, 0x00000002. These values should not
   * change once initialized. The "used_storage" field tracks current allocation status.
   * Although each record contains this field, only the field from the largest member of
   * an aliased group is used. In our case, it would be d2's. The master_storage pointer
   * of d2, s4 and s5 would all point to d2's used_storage field. Each bit in a used_storage
   * represents 32 bits of storage. d2's used_storage would be initialized to 0xfffffffc.
   * Then, if we wanted to determine whether s4 could be allocated, we would "and"
   * s4's storage_mask with s4's *master_storage. If the result is zero, s4 is free and
   * to allocate: *master_storage |= storage_mask. To free, *master_storage &= ~storage_mask.
   * (An illustrative sketch of this bookkeeping follows the class definition below.)
   *
   * For an X86 vector register example, storage_mask would be:
   *    0x00000001 for 32-bit view of xmm1
   *    0x00000003 for 64-bit view of xmm1
   *    0x0000000f for 128-bit view of xmm1
   *    0x000000ff for 256-bit view of ymm1   // future expansion, if needed
   *    0x0000ffff for 512-bit view of ymm1   // future expansion, if needed
   *    0xffffffff for 1024-bit view of ymm1  // future expansion, if needed
   *
   * The "liveness" of a register is handled in a similar way. The liveness_ storage is
   * held in the widest member of an aliased set. Note, though, that for a temp register to
   * be reused as live, it must both be marked live and the associated SReg() must match the
   * desired s_reg. This gets a little complicated when dealing with aliased registers. All
   * members of an aliased set will share the same liveness flags, but each will individually
   * maintain s_reg_. In this way we can know that at least one member of an
   * aliased set is live, but will only fully match on the appropriate alias view. For example,
   * if Arm d1 is live as a double and has s_reg_ set to Dalvik v8 (which also implies v9
   * because it is wide), its aliases s2 and s3 will show as live, but will have
   * s_reg_ == INVALID_SREG. An attempt to later AllocLiveReg() of v9 with a single-precision
   * view will fail because although s3's liveness bit is set, its s_reg_ will not match v9.
   * This will cause all members of the aliased set to be clobbered and AllocLiveReg() will
   * report that v9 is currently not live as a single (which is what we want).
   *
   * NOTE: the x86 usage is still somewhat in flux. There are competing notions of how
   * to treat xmm registers:
   *     1. Treat them all as 128-bits wide, but denote how much data used via bytes field.
   *         o This more closely matches reality, but means you'd need to be able to get
   *           to the associated RegisterInfo struct to figure out how it's being used.
   *         o This is how 64-bit core registers will be used - always 64 bits, but the
   *           "bytes" field will be 4 for 32-bit usage and 8 for 64-bit usage.
   *     2. View the xmm registers based on contents.
   *         o A single in an xmm2 register would be k32BitVector, while a double in xmm2 would
   *           be a k64BitVector.
   *         o Note that the two uses above would be considered distinct registers (but with
   *           the aliasing mechanism, we could detect interference).
   *         o This is how aliased double and single float registers will be handled on
   *           Arm and MIPS.
   * Working plan is, for all targets, to follow mechanism 1 for 64-bit core registers, and
   * mechanism 2 for aliased float registers and x86 vector registers.
   */
  class RegisterInfo {
   public:
    RegisterInfo(RegStorage r, const ResourceMask& mask = kEncodeAll);
    ~RegisterInfo() {}
    static void* operator new(size_t size, ArenaAllocator* arena) {
      return arena->Alloc(size, kArenaAllocRegAlloc);
    }

    static const uint32_t k32SoloStorageMask     = 0x00000001;
    static const uint32_t kLowSingleStorageMask  = 0x00000001;
    static const uint32_t kHighSingleStorageMask = 0x00000002;
    static const uint32_t k64SoloStorageMask     = 0x00000003;
    static const uint32_t k128SoloStorageMask    = 0x0000000f;
    static const uint32_t k256SoloStorageMask    = 0x000000ff;
    static const uint32_t k512SoloStorageMask    = 0x0000ffff;
    static const uint32_t k1024SoloStorageMask   = 0xffffffff;

    bool InUse() { return (storage_mask_ & master_->used_storage_) != 0; }
    void MarkInUse() { master_->used_storage_ |= storage_mask_; }
    void MarkFree() { master_->used_storage_ &= ~storage_mask_; }
    // No part of the containing storage is live in this view.
    bool IsDead() { return (master_->liveness_ & storage_mask_) == 0; }
    // Liveness of this view matches.  Note: not equivalent to !IsDead().
    bool IsLive() { return (master_->liveness_ & storage_mask_) == storage_mask_; }
    void MarkLive(int s_reg) {
      // TODO: Anything useful to assert here?
      s_reg_ = s_reg;
      master_->liveness_ |= storage_mask_;
    }
    void MarkDead() {
      if (SReg() != INVALID_SREG) {
        s_reg_ = INVALID_SREG;
        master_->liveness_ &= ~storage_mask_;
        ResetDefBody();
      }
    }
    RegStorage GetReg() { return reg_; }
    void SetReg(RegStorage reg) { reg_ = reg; }
    bool IsTemp() { return is_temp_; }
    void SetIsTemp(bool val) { is_temp_ = val; }
    bool IsWide() { return wide_value_; }
    void SetIsWide(bool val) {
      wide_value_ = val;
      if (!val) {
        // If not wide, reset partner to self.
        SetPartner(GetReg());
      }
    }
    bool IsDirty() { return dirty_; }
    void SetIsDirty(bool val) { dirty_ = val; }
    RegStorage Partner() { return partner_; }
    void SetPartner(RegStorage partner) { partner_ = partner; }
    int SReg() { return (!IsTemp() || IsLive()) ? s_reg_ : INVALID_SREG; }
    const ResourceMask& DefUseMask() { return def_use_mask_; }
    void SetDefUseMask(const ResourceMask& def_use_mask) { def_use_mask_ = def_use_mask; }
    RegisterInfo* Master() { return master_; }
    void SetMaster(RegisterInfo* master) {
      master_ = master;
      if (master != this) {
        master_->aliased_ = true;
        DCHECK(alias_chain_ == nullptr);
        alias_chain_ = master_->alias_chain_;
        master_->alias_chain_ = this;
      }
    }
    bool IsAliased() { return aliased_; }
    RegisterInfo* GetAliasChain() { return alias_chain_; }
    uint32_t StorageMask() { return storage_mask_; }
    void SetStorageMask(uint32_t storage_mask) { storage_mask_ = storage_mask; }
    LIR* DefStart() { return def_start_; }
    void SetDefStart(LIR* def_start) { def_start_ = def_start; }
    LIR* DefEnd() { return def_end_; }
    void SetDefEnd(LIR* def_end) { def_end_ = def_end; }
    void ResetDefBody() { def_start_ = def_end_ = nullptr; }
    // Find member of aliased set matching storage_used; return nullptr if none.
    RegisterInfo* FindMatchingView(uint32_t storage_used) {
      RegisterInfo* res = Master();
      for (; res != nullptr; res = res->GetAliasChain()) {
        if (res->StorageMask() == storage_used) {
          break;
        }
      }
      return res;
    }

   private:
    RegStorage reg_;
    bool is_temp_;               // Can allocate as temp?
    bool wide_value_;            // Holds a Dalvik wide value (either itself, or part of a pair).
    bool dirty_;                 // If live, is it dirty?
    bool aliased_;               // Is this the master for other aliased RegisterInfo's?
    RegStorage partner_;         // If wide_value, other reg of pair or self if 64-bit register.
    int s_reg_;                  // Name of live value.
    ResourceMask def_use_mask_;  // Resources for this element.
    uint32_t used_storage_;      // 1 bit per 4 bytes of storage.  Unused by aliases.
    uint32_t liveness_;          // 1 bit per 4 bytes of storage.  Unused by aliases.
    RegisterInfo* master_;       // Pointer to controlling storage mask.
    uint32_t storage_mask_;      // Track allocation of sub-units.
    LIR *def_start_;             // Starting inst in last def sequence.
    LIR *def_end_;               // Ending inst in last def sequence.
    RegisterInfo* alias_chain_;  // Chain of aliased registers.
  };
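
  // Illustrative sketch of the aliased-storage bookkeeping described above, for an Arm-style
  // pool where d2 masters the aliased set {d2, s4, s5}. The rs_s4 symbol is a placeholder for
  // whatever RegStorage constant the target backend defines; this is not code from this file.
  //   RegisterInfo* s4 = GetRegInfo(rs_s4);  // StorageMask() == 0x00000001, Master() is d2's info.
  //   if (!s4->InUse()) {                    // Tests s4's storage_mask_ against d2's used_storage_.
  //     s4->MarkInUse();                     // *master_storage |= storage_mask.
  //   }
  //   ...
  //   s4->MarkFree();                        // *master_storage &= ~storage_mask.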

  class RegisterPool {
   public:
    RegisterPool(Mir2Lir* m2l, ArenaAllocator* arena,
                 const ArrayRef<const RegStorage>& core_regs,
                 const ArrayRef<const RegStorage>& core64_regs,
                 const ArrayRef<const RegStorage>& sp_regs,
                 const ArrayRef<const RegStorage>& dp_regs,
                 const ArrayRef<const RegStorage>& reserved_regs,
                 const ArrayRef<const RegStorage>& reserved64_regs,
                 const ArrayRef<const RegStorage>& core_temps,
                 const ArrayRef<const RegStorage>& core64_temps,
                 const ArrayRef<const RegStorage>& sp_temps,
                 const ArrayRef<const RegStorage>& dp_temps);
    ~RegisterPool() {}
    static void* operator new(size_t size, ArenaAllocator* arena) {
      return arena->Alloc(size, kArenaAllocRegAlloc);
    }
    void ResetNextTemp() {
      next_core_reg_ = 0;
      next_sp_reg_ = 0;
      next_dp_reg_ = 0;
    }
    GrowableArray<RegisterInfo*> core_regs_;
    int next_core_reg_;
    GrowableArray<RegisterInfo*> core64_regs_;
    int next_core64_reg_;
    GrowableArray<RegisterInfo*> sp_regs_;    // Single precision float.
    int next_sp_reg_;
    GrowableArray<RegisterInfo*> dp_regs_;    // Double precision float.
    int next_dp_reg_;
    GrowableArray<RegisterInfo*>* ref_regs_;  // Points to core_regs_ or core64_regs_.
    int* next_ref_reg_;

   private:
    Mir2Lir* const m2l_;
  };

  struct PromotionMap {
    RegLocationType core_location:3;
    uint8_t core_reg;
    RegLocationType fp_location:3;
    uint8_t fp_reg;
    bool first_in_pair;
  };

  //
  // Slow paths.  This object is used to generate a sequence of code that is executed in the
  // slow path.  For example, resolving a string or class is slow as it will only be executed
  // once (after that it is resolved and doesn't need to be done again).  We want slow paths
  // to be placed out-of-line, and not require a (mispredicted, probably) conditional forward
  // branch over them.
  //
  // If you want to create a slow path, declare a class derived from LIRSlowPath and provide
  // the Compile() function that will be called near the end of the code generated by the
  // method.
  //
  // The basic flow for a slow path is:
  //
  //           CMP reg, #value
  //           BEQ fromfast
  //   cont:
  //           ...
  //           fast path code
  //           ...
  //           more code
  //           ...
  //           RETURN
  //
  //   fromfast:
  //           ...
  //           slow path code
  //           ...
  //           B cont
  //
  // So you see we need two labels and two branches.  The first branch (called fromfast) is
  // the conditional branch to the slow path code.  The second label (called cont) is used
  // as an unconditional branch target for getting back to the code after the slow path
  // has completed.
  //

  class LIRSlowPath {
   public:
    LIRSlowPath(Mir2Lir* m2l, const DexOffset dexpc, LIR* fromfast,
                LIR* cont = nullptr) :
        m2l_(m2l), cu_(m2l->cu_), current_dex_pc_(dexpc), fromfast_(fromfast), cont_(cont) {
      m2l->StartSlowPath(cont);
    }
    virtual ~LIRSlowPath() {}
    virtual void Compile() = 0;

    static void* operator new(size_t size, ArenaAllocator* arena) {
      return arena->Alloc(size, kArenaAllocData);
    }

    LIR *GetContinuationLabel() {
      return cont_;
    }

    LIR *GetFromFast() {
      return fromfast_;
    }

   protected:
    LIR* GenerateTargetLabel(int opcode = kPseudoTargetLabel);

    Mir2Lir* const m2l_;
    CompilationUnit* const cu_;
    const DexOffset current_dex_pc_;
    LIR* const fromfast_;
    LIR* const cont_;
  };
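
  // Illustrative sketch (not a slow path defined in this file) of a concrete LIRSlowPath
  // subclass; the slow-path body and the OpUnconditionalBranch() call back to cont_ are
  // placeholders for whatever the derived class actually needs to emit.
  //   class ExampleSlowPath : public LIRSlowPath {
  //    public:
  //     ExampleSlowPath(Mir2Lir* m2l, DexOffset dexpc, LIR* fromfast, LIR* cont)
  //         : LIRSlowPath(m2l, dexpc, fromfast, cont) {}
  //     void Compile() {
  //       GenerateTargetLabel();                // Emits the out-of-line "fromfast:" label.
  //       // ... emit the slow-path code here ...
  //       m2l_->OpUnconditionalBranch(cont_);   // Branch back to "cont:" in the fast path.
  //     }
  //   };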

  // Helper class for changing mem_ref_type_ until the end of the current scope. See mem_ref_type_.
  class ScopedMemRefType {
   public:
    ScopedMemRefType(Mir2Lir* m2l, ResourceMask::ResourceBit new_mem_ref_type)
        : m2l_(m2l),
          old_mem_ref_type_(m2l->mem_ref_type_) {
      m2l_->mem_ref_type_ = new_mem_ref_type;
    }

    ~ScopedMemRefType() {
      m2l_->mem_ref_type_ = old_mem_ref_type_;
    }

   private:
    Mir2Lir* const m2l_;
    ResourceMask::ResourceBit old_mem_ref_type_;

    DISALLOW_COPY_AND_ASSIGN(ScopedMemRefType);
  };
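
  // Typical use (a sketch; the ResourceBit chosen here is only an assumed example): temporarily
  // retag emitted memory references for the duration of a block, with the previous
  // mem_ref_type_ restored automatically at scope exit.
  //   {
  //     ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
  //     // ... loads/stores emitted here are treated as literal-pool accesses ...
  //   }  // mem_ref_type_ reverts here.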

  virtual ~Mir2Lir() {}

  int32_t s4FromSwitchData(const void* switch_data) {
    return *reinterpret_cast<const int32_t*>(switch_data);
  }

  /*
   * TODO: this is a trace JIT vestige, and its use should be reconsidered.  At the time
   * it was introduced, it was intended to be a quick best guess of type without having to
   * take the time to do type analysis.  Currently, though, we have a much better idea of
   * the types of Dalvik virtual registers.  Instead of using this for a best guess, why not
   * just use our knowledge of type to select the most appropriate register class?
   */
  RegisterClass RegClassBySize(OpSize size) {
    if (size == kReference) {
      return kRefReg;
    } else {
      return (size == kUnsignedHalf || size == kSignedHalf || size == kUnsignedByte ||
              size == kSignedByte) ? kCoreReg : kAnyReg;
    }
  }

  size_t CodeBufferSizeInBytes() {
    return code_buffer_.size() / sizeof(code_buffer_[0]);
  }

  static bool IsPseudoLirOp(int opcode) {
    return (opcode < 0);
  }

  /*
   * LIR operands are 32-bit integers.  Sometimes (especially for managing
   * instructions which require PC-relative fixups) we need the operands to carry
   * pointers.  To do this, we assign these pointers an index in pointer_storage_, and
   * hold that index in the operand array.
   * TUNING: If use of these utilities becomes more common on 32-bit builds, it
   * may be worth conditionally-compiling a set of identity functions here.
   */
  uint32_t WrapPointer(void* pointer) {
    uint32_t res = pointer_storage_.Size();
    pointer_storage_.Insert(pointer);
    return res;
  }

  void* UnwrapPointer(size_t index) {
    return pointer_storage_.Get(index);
  }
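
  // Usage sketch: stash a pointer-typed value in a 32-bit LIR operand slot and recover it later
  // (e.g. during assembly).  SwitchTable is just an example payload type here.
  //   SwitchTable* tab_rec = ...;
  //   uint32_t operand = WrapPointer(tab_rec);  // Index into pointer_storage_.
  //   ...
  //   SwitchTable* recovered = reinterpret_cast<SwitchTable*>(UnwrapPointer(operand));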

  // strdup(), but allocates from the arena.
  char* ArenaStrdup(const char* str) {
    size_t len = strlen(str) + 1;
    char* res = reinterpret_cast<char*>(arena_->Alloc(len, kArenaAllocMisc));
    if (res != NULL) {
      strncpy(res, str, len);
    }
    return res;
  }

  // Shared by all targets - implemented in codegen_util.cc
  void AppendLIR(LIR* lir);
  void InsertLIRBefore(LIR* current_lir, LIR* new_lir);
  void InsertLIRAfter(LIR* current_lir, LIR* new_lir);

  /**
   * @brief Provides the maximum number of compiler temporaries that the backend can/wants
   * to place in a frame.
   * @return Returns the maximum number of compiler temporaries.
   */
  size_t GetMaxPossibleCompilerTemps() const;

  /**
   * @brief Provides the number of bytes needed in frame for spilling of compiler temporaries.
   * @return Returns the size in bytes for space needed for compiler temporary spill region.
   */
  size_t GetNumBytesForCompilerTempSpillRegion();

  DexOffset GetCurrentDexPc() const {
    return current_dalvik_offset_;
  }

  RegisterClass ShortyToRegClass(char shorty_type);
  RegisterClass LocToRegClass(RegLocation loc);
  int ComputeFrameSize();
  virtual void Materialize();
  virtual CompiledMethod* GetCompiledMethod();
  void MarkSafepointPC(LIR* inst);
  void MarkSafepointPCAfter(LIR* after);
  void SetupResourceMasks(LIR* lir);
  void SetMemRefType(LIR* lir, bool is_load, int mem_type);
  void AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load, bool is64bit);
  void SetupRegMask(ResourceMask* mask, int reg);
  void DumpLIRInsn(LIR* arg, unsigned char* base_addr);
  void DumpPromotionMap();
  void CodegenDump();
  LIR* RawLIR(DexOffset dalvik_offset, int opcode, int op0 = 0, int op1 = 0,
              int op2 = 0, int op3 = 0, int op4 = 0, LIR* target = NULL);
  LIR* NewLIR0(int opcode);
  LIR* NewLIR1(int opcode, int dest);
  LIR* NewLIR2(int opcode, int dest, int src1);
  LIR* NewLIR2NoDest(int opcode, int src, int info);
  LIR* NewLIR3(int opcode, int dest, int src1, int src2);
  LIR* NewLIR4(int opcode, int dest, int src1, int src2, int info);
  LIR* NewLIR5(int opcode, int dest, int src1, int src2, int info1, int info2);
  LIR* ScanLiteralPool(LIR* data_target, int value, unsigned int delta);
  LIR* ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi);
  LIR* ScanLiteralPoolMethod(LIR* data_target, const MethodReference& method);
  LIR* AddWordData(LIR* *constant_list_p, int value);
  LIR* AddWideData(LIR* *constant_list_p, int val_lo, int val_hi);
  void ProcessSwitchTables();
  void DumpSparseSwitchTable(const uint16_t* table);
  void DumpPackedSwitchTable(const uint16_t* table);
  void MarkBoundary(DexOffset offset, const char* inst_str);
  void NopLIR(LIR* lir);
  void UnlinkLIR(LIR* lir);
  bool EvaluateBranch(Instruction::Code opcode, int src1, int src2);
  bool IsInexpensiveConstant(RegLocation rl_src);
  ConditionCode FlipComparisonOrder(ConditionCode before);
  ConditionCode NegateComparison(ConditionCode before);
  virtual void InstallLiteralPools();
  void InstallSwitchTables();
  void InstallFillArrayData();
  bool VerifyCatchEntries();
  void CreateMappingTables();
  void CreateNativeGcMap();
  int AssignLiteralOffset(CodeOffset offset);
  int AssignSwitchTablesOffset(CodeOffset offset);
  int AssignFillArrayDataOffset(CodeOffset offset);
  LIR* InsertCaseLabel(DexOffset vaddr, int keyVal);
  void MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec);
  void MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec);

  virtual void StartSlowPath(LIR *label) {}
  virtual void BeginInvoke(CallInfo* info) {}
  virtual void EndInvoke(CallInfo* info) {}


  // Handle bookkeeping to convert a wide RegLocation to a narrow RegLocation.  No code generated.
  RegLocation NarrowRegLoc(RegLocation loc);

  // Shared by all targets - implemented in local_optimizations.cc
  void ConvertMemOpIntoMove(LIR* orig_lir, RegStorage dest, RegStorage src);
  void ApplyLoadStoreElimination(LIR* head_lir, LIR* tail_lir);
  void ApplyLoadHoisting(LIR* head_lir, LIR* tail_lir);
  virtual void ApplyLocalOptimizations(LIR* head_lir, LIR* tail_lir);

  // Shared by all targets - implemented in ralloc_util.cc
  int GetSRegHi(int lowSreg);
  bool LiveOut(int s_reg);
  void SimpleRegAlloc();
  void ResetRegPool();
  void CompilerInitPool(RegisterInfo* info, RegStorage* regs, int num);
  void DumpRegPool(GrowableArray<RegisterInfo*>* regs);
  void DumpCoreRegPool();
  void DumpFpRegPool();
  void DumpRegPools();
  /* Mark a temp register as dead.  Does not affect allocation state. */
  void Clobber(RegStorage reg);
  void ClobberSReg(int s_reg);
  void ClobberAliases(RegisterInfo* info, uint32_t clobber_mask);
  int SRegToPMap(int s_reg);
  void RecordCorePromotion(RegStorage reg, int s_reg);
  RegStorage AllocPreservedCoreReg(int s_reg);
  void RecordFpPromotion(RegStorage reg, int s_reg);
  RegStorage AllocPreservedFpReg(int s_reg);
  virtual RegStorage AllocPreservedSingle(int s_reg);
  virtual RegStorage AllocPreservedDouble(int s_reg);
  RegStorage AllocTempBody(GrowableArray<RegisterInfo*> &regs, int* next_temp, bool required);
  virtual RegStorage AllocFreeTemp();
  virtual RegStorage AllocTemp();
  virtual RegStorage AllocTempWide();
  virtual RegStorage AllocTempRef();
  virtual RegStorage AllocTempSingle();
  virtual RegStorage AllocTempDouble();
  virtual RegStorage AllocTypedTemp(bool fp_hint, int reg_class);
  virtual RegStorage AllocTypedTempWide(bool fp_hint, int reg_class);
  void FlushReg(RegStorage reg);
  void FlushRegWide(RegStorage reg);
  RegStorage AllocLiveReg(int s_reg, int reg_class, bool wide);
  RegStorage FindLiveReg(GrowableArray<RegisterInfo*> &regs, int s_reg);
  virtual void FreeTemp(RegStorage reg);
  virtual void FreeRegLocTemps(RegLocation rl_keep, RegLocation rl_free);
  virtual bool IsLive(RegStorage reg);
  virtual bool IsTemp(RegStorage reg);
  bool IsPromoted(RegStorage reg);
  bool IsDirty(RegStorage reg);
  void LockTemp(RegStorage reg);
  void ResetDef(RegStorage reg);
  void NullifyRange(RegStorage reg, int s_reg);
  void MarkDef(RegLocation rl, LIR *start, LIR *finish);
  void MarkDefWide(RegLocation rl, LIR *start, LIR *finish);
  void ResetDefLoc(RegLocation rl);
  void ResetDefLocWide(RegLocation rl);
  void ResetDefTracking();
  void ClobberAllTemps();
  void FlushSpecificReg(RegisterInfo* info);
  void FlushAllRegs();
  bool RegClassMatches(int reg_class, RegStorage reg);
  void MarkLive(RegLocation loc);
  void MarkTemp(RegStorage reg);
  void UnmarkTemp(RegStorage reg);
  void MarkWide(RegStorage reg);
  void MarkNarrow(RegStorage reg);
  void MarkClean(RegLocation loc);
  void MarkDirty(RegLocation loc);
  void MarkInUse(RegStorage reg);
  bool CheckCorePoolSanity();
  virtual RegLocation UpdateLoc(RegLocation loc);
  virtual RegLocation UpdateLocWide(RegLocation loc);
  RegLocation UpdateRawLoc(RegLocation loc);

  /**
   * @brief Used to prepare a register location to receive a wide value.
   * @see EvalLoc
   * @param loc the location where the value will be stored.
   * @param reg_class Type of register needed.
   * @param update Whether the liveness information should be updated.
   * @return Returns the properly typed temporary in physical register pairs.
   */
  virtual RegLocation EvalLocWide(RegLocation loc, int reg_class, bool update);

  /**
   * @brief Used to prepare a register location to receive a value.
   * @param loc the location where the value will be stored.
   * @param reg_class Type of register needed.
   * @param update Whether the liveness information should be updated.
   * @return Returns the properly typed temporary in physical register.
   */
  virtual RegLocation EvalLoc(RegLocation loc, int reg_class, bool update);

  void CountRefs(RefCounts* core_counts, RefCounts* fp_counts, size_t num_regs);
  void DumpCounts(const RefCounts* arr, int size, const char* msg);
  void DoPromotion();
  int VRegOffset(int v_reg);
  int SRegOffset(int s_reg);
  RegLocation GetReturnWide(RegisterClass reg_class);
  RegLocation GetReturn(RegisterClass reg_class);
  RegisterInfo* GetRegInfo(RegStorage reg);

  // Shared by all targets - implemented in gen_common.cc.
  void AddIntrinsicSlowPath(CallInfo* info, LIR* branch, LIR* resume = nullptr);
  virtual bool HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div,
                                RegLocation rl_src, RegLocation rl_dest, int lit);
  bool HandleEasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit);
  virtual void HandleSlowPaths();
  void GenBarrier();
  void GenDivZeroException();
  // c_code holds condition code that's generated from testing divisor against 0.
  void GenDivZeroCheck(ConditionCode c_code);
  // reg holds divisor.
  void GenDivZeroCheck(RegStorage reg);
  void GenArrayBoundsCheck(RegStorage index, RegStorage length);
  void GenArrayBoundsCheck(int32_t index, RegStorage length);
  LIR* GenNullCheck(RegStorage reg);
  void MarkPossibleNullPointerException(int opt_flags);
  void MarkPossibleNullPointerExceptionAfter(int opt_flags, LIR* after);
  void MarkPossibleStackOverflowException();
  void ForceImplicitNullCheck(RegStorage reg, int opt_flags);
  LIR* GenImmedCheck(ConditionCode c_code, RegStorage reg, int imm_val, ThrowKind kind);
  LIR* GenNullCheck(RegStorage m_reg, int opt_flags);
  LIR* GenExplicitNullCheck(RegStorage m_reg, int opt_flags);
  virtual void GenImplicitNullCheck(RegStorage reg, int opt_flags);
  void GenCompareAndBranch(Instruction::Code opcode, RegLocation rl_src1,
                           RegLocation rl_src2, LIR* taken, LIR* fall_through);
  void GenCompareZeroAndBranch(Instruction::Code opcode, RegLocation rl_src,
                               LIR* taken, LIR* fall_through);
  virtual void GenIntToLong(RegLocation rl_dest, RegLocation rl_src);
  void GenIntNarrowing(Instruction::Code opcode, RegLocation rl_dest,
                       RegLocation rl_src);
  void GenNewArray(uint32_t type_idx, RegLocation rl_dest,
                   RegLocation rl_src);
  void GenFilledNewArray(CallInfo* info);
  void GenSput(MIR* mir, RegLocation rl_src,
               bool is_long_or_double, bool is_object);
  void GenSget(MIR* mir, RegLocation rl_dest,
               bool is_long_or_double, bool is_object);
  void GenIGet(MIR* mir, int opt_flags, OpSize size,
               RegLocation rl_dest, RegLocation rl_obj, bool is_long_or_double, bool is_object);
  void GenIPut(MIR* mir, int opt_flags, OpSize size,
               RegLocation rl_src, RegLocation rl_obj, bool is_long_or_double, bool is_object);
  void GenArrayObjPut(int opt_flags, RegLocation rl_array, RegLocation rl_index,
                      RegLocation rl_src);

  void GenConstClass(uint32_t type_idx, RegLocation rl_dest);
  void GenConstString(uint32_t string_idx, RegLocation rl_dest);
  void GenNewInstance(uint32_t type_idx, RegLocation rl_dest);
  void GenThrow(RegLocation rl_src);
  void GenInstanceof(uint32_t type_idx, RegLocation rl_dest, RegLocation rl_src);
  void GenCheckCast(uint32_t insn_idx, uint32_t type_idx, RegLocation rl_src);
  void GenLong3Addr(OpKind first_op, OpKind second_op, RegLocation rl_dest,
| 870 | RegLocation rl_src1, RegLocation rl_src2); |
Serban Constantinescu | ed65c5e | 2014-05-22 15:10:18 +0100 | [diff] [blame] | 871 | virtual void GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 872 | RegLocation rl_src1, RegLocation rl_shift); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 873 | void GenArithOpIntLit(Instruction::Code opcode, RegLocation rl_dest, |
| 874 | RegLocation rl_src, int lit); |
| 875 | void GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 876 | RegLocation rl_src1, RegLocation rl_src2); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 877 | template <size_t pointer_size> |
| 878 | void GenConversionCall(ThreadOffset<pointer_size> func_offset, RegLocation rl_dest, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 879 | RegLocation rl_src); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 880 | virtual void GenSuspendTest(int opt_flags); |
| 881 | virtual void GenSuspendTestAndBranch(int opt_flags, LIR* target); |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 882 | |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 883 | // This will be overridden by the x86 implementation. |
| 884 | virtual void GenConstWide(RegLocation rl_dest, int64_t value); |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 885 | virtual void GenArithOpInt(Instruction::Code opcode, RegLocation rl_dest, |
| 886 | RegLocation rl_src1, RegLocation rl_src2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 887 | |
| 888 | // Shared by all targets - implemented in gen_invoke.cc. |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 889 | template <size_t pointer_size> |
| 890 | LIR* CallHelper(RegStorage r_tgt, ThreadOffset<pointer_size> helper_offset, bool safepoint_pc, |
Dave Allison | d6ed642 | 2014-04-09 23:36:15 +0000 | [diff] [blame] | 891 | bool use_link = true); |
| 892 | RegStorage CallHelperSetup(ThreadOffset<4> helper_offset); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 893 | RegStorage CallHelperSetup(ThreadOffset<8> helper_offset); |
| 894 | template <size_t pointer_size> |
| 895 | void CallRuntimeHelper(ThreadOffset<pointer_size> helper_offset, bool safepoint_pc); |
| 896 | template <size_t pointer_size> |
| 897 | void CallRuntimeHelperImm(ThreadOffset<pointer_size> helper_offset, int arg0, bool safepoint_pc); |
| 898 | template <size_t pointer_size> |
| 899 | void CallRuntimeHelperReg(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, bool safepoint_pc); |
| 900 | template <size_t pointer_size> |
| 901 | void CallRuntimeHelperRegLocation(ThreadOffset<pointer_size> helper_offset, RegLocation arg0, |
Ian Rogers | 468532e | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 902 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 903 | template <size_t pointer_size> |
| 904 | void CallRuntimeHelperImmImm(ThreadOffset<pointer_size> helper_offset, int arg0, int arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 905 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 906 | template <size_t pointer_size> |
| 907 | void CallRuntimeHelperImmRegLocation(ThreadOffset<pointer_size> helper_offset, int arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 908 | RegLocation arg1, bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 909 | template <size_t pointer_size> |
| 910 | void CallRuntimeHelperRegLocationImm(ThreadOffset<pointer_size> helper_offset, RegLocation arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 911 | int arg1, bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 912 | template <size_t pointer_size> |
| 913 | void CallRuntimeHelperImmReg(ThreadOffset<pointer_size> helper_offset, int arg0, RegStorage arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 914 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 915 | template <size_t pointer_size> |
| 916 | void CallRuntimeHelperRegImm(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, int arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 917 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 918 | template <size_t pointer_size> |
| 919 | void CallRuntimeHelperImmMethod(ThreadOffset<pointer_size> helper_offset, int arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 920 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 921 | template <size_t pointer_size> |
| 922 | void CallRuntimeHelperRegMethod(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 923 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 924 | template <size_t pointer_size> |
| 925 | void CallRuntimeHelperRegMethodRegLocation(ThreadOffset<pointer_size> helper_offset, |
| 926 | RegStorage arg0, RegLocation arg2, bool safepoint_pc); |
| 927 | template <size_t pointer_size> |
| 928 | void CallRuntimeHelperRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 929 | RegLocation arg0, RegLocation arg1, |
| 930 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 931 | template <size_t pointer_size> |
| 932 | void CallRuntimeHelperRegReg(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, |
| 933 | RegStorage arg1, bool safepoint_pc); |
| 934 | template <size_t pointer_size> |
| 935 | void CallRuntimeHelperRegRegImm(ThreadOffset<pointer_size> helper_offset, RegStorage arg0, |
| 936 | RegStorage arg1, int arg2, bool safepoint_pc); |
| 937 | template <size_t pointer_size> |
| 938 | void CallRuntimeHelperImmMethodRegLocation(ThreadOffset<pointer_size> helper_offset, int arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 939 | RegLocation arg2, bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 940 | template <size_t pointer_size> |
| 941 | void CallRuntimeHelperImmMethodImm(ThreadOffset<pointer_size> helper_offset, int arg0, int arg2, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 942 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 943 | template <size_t pointer_size> |
| 944 | void CallRuntimeHelperImmRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 945 | int arg0, RegLocation arg1, RegLocation arg2, |
| 946 | bool safepoint_pc); |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 947 | template <size_t pointer_size> |
| 948 | void CallRuntimeHelperRegLocationRegLocationRegLocation(ThreadOffset<pointer_size> helper_offset, |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 949 | RegLocation arg0, RegLocation arg1, |
| 950 | RegLocation arg2, |
| 951 | bool safepoint_pc); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 952 | void GenInvoke(CallInfo* info); |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 953 | void GenInvokeNoInline(CallInfo* info); |
Matteo Franchin | e45fb9e | 2014-05-06 10:10:30 +0100 | [diff] [blame] | 954 | virtual void FlushIns(RegLocation* ArgLocs, RegLocation rl_method); |
Dmitry Petrochenko | 58994cd | 2014-05-17 01:02:18 +0700 | [diff] [blame] | 955 | virtual int GenDalvikArgsNoRange(CallInfo* info, int call_state, LIR** pcrLabel, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 956 | NextCallInsn next_call_insn, |
| 957 | const MethodReference& target_method, |
| 958 | uint32_t vtable_idx, |
| 959 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 960 | bool skip_this); |
Dmitry Petrochenko | 58994cd | 2014-05-17 01:02:18 +0700 | [diff] [blame] | 961 | virtual int GenDalvikArgsRange(CallInfo* info, int call_state, LIR** pcrLabel, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 962 | NextCallInsn next_call_insn, |
| 963 | const MethodReference& target_method, |
| 964 | uint32_t vtable_idx, |
| 965 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 966 | bool skip_this); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 967 | |
| 968 | /** |
| 969 | * @brief Used to determine the register location of the destination. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 970 | * @details This is needed during generation of inline intrinsics because it finds the |
| 971 | * destination of the return value, |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 972 | * either the physical register or the target of move-result. |
| 973 | * @param info Information about the invoke. |
| 974 | * @return Returns the destination location. |
| 975 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 976 | RegLocation InlineTarget(CallInfo* info); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 977 | |
| 978 | /** |
| 979 | * @brief Used to determine the wide register location of destination. |
| 980 | * @see InlineTarget |
| 981 | * @param info Information about the invoke. |
| 982 | * @return Returns the destination location. |
| 983 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 984 | RegLocation InlineTargetWide(CallInfo* info); |
| 985 | |
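    /*
     * Illustrative sketch of InlineTarget() inside an intrinsic generator such as the
     * GenInlined* routines below. A plausible shape only; it assumes CallInfo::args holds
     * the incoming argument locations and uses the kRefReg/kCoreReg register classes.
     *
     *   RegLocation rl_dest = InlineTarget(info);                // Where the result must land.
     *   RegLocation rl_obj = LoadValue(info->args[0], kRefReg);  // First argument of the call.
     *   RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
     *   // ... emit the intrinsic body into rl_result.reg ...
     *   StoreValue(rl_dest, rl_result);
     */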
| 986 | bool GenInlinedCharAt(CallInfo* info); |
| 987 | bool GenInlinedStringIsEmptyOrLength(CallInfo* info, bool is_empty); |
Serban Constantinescu | 23abec9 | 2014-07-02 16:13:38 +0100 | [diff] [blame] | 988 | virtual bool GenInlinedReverseBits(CallInfo* info, OpSize size); |
Vladimir Marko | 6bdf1ff | 2013-10-29 17:40:46 +0000 | [diff] [blame] | 989 | bool GenInlinedReverseBytes(CallInfo* info, OpSize size); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 990 | bool GenInlinedAbsInt(CallInfo* info); |
Serban Constantinescu | 169489b | 2014-06-11 16:43:35 +0100 | [diff] [blame] | 991 | virtual bool GenInlinedAbsLong(CallInfo* info); |
Yixin Shou | 7071c8d | 2014-03-05 06:07:48 -0500 | [diff] [blame] | 992 | virtual bool GenInlinedAbsFloat(CallInfo* info); |
| 993 | virtual bool GenInlinedAbsDouble(CallInfo* info); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 994 | bool GenInlinedFloatCvt(CallInfo* info); |
| 995 | bool GenInlinedDoubleCvt(CallInfo* info); |
DaniilSokolov | 70c4f06 | 2014-06-24 17:34:00 -0700 | [diff] [blame] | 996 | virtual bool GenInlinedArrayCopyCharArray(CallInfo* info); |
Mark Mendell | 4028a6c | 2014-02-19 20:06:20 -0800 | [diff] [blame] | 997 | virtual bool GenInlinedIndexOf(CallInfo* info, bool zero_based); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 998 | bool GenInlinedStringCompareTo(CallInfo* info); |
| 999 | bool GenInlinedCurrentThread(CallInfo* info); |
| 1000 | bool GenInlinedUnsafeGet(CallInfo* info, bool is_long, bool is_volatile); |
| 1001 | bool GenInlinedUnsafePut(CallInfo* info, bool is_long, bool is_object, |
| 1002 | bool is_volatile, bool is_ordered); |
Matteo Franchin | e45fb9e | 2014-05-06 10:10:30 +0100 | [diff] [blame] | 1003 | virtual int LoadArgRegs(CallInfo* info, int call_state, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1004 | NextCallInsn next_call_insn, |
| 1005 | const MethodReference& target_method, |
| 1006 | uint32_t vtable_idx, |
| 1007 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 1008 | bool skip_this); |
| 1009 | |
| 1010 | // Shared by all targets - implemented in gen_loadstore.cc. |
| 1011 | RegLocation LoadCurrMethod(); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1012 | void LoadCurrMethodDirect(RegStorage r_tgt); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1013 | virtual LIR* LoadConstant(RegStorage r_dest, int value); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1014 | // Natural word size. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1015 | virtual LIR* LoadWordDisp(RegStorage r_base, int displacement, RegStorage r_dest) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1016 | return LoadBaseDisp(r_base, displacement, r_dest, kWord, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1017 | } |
| 1018 | // Load 32 bits, regardless of target. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1019 | virtual LIR* Load32Disp(RegStorage r_base, int displacement, RegStorage r_dest) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1020 | return LoadBaseDisp(r_base, displacement, r_dest, k32, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1021 | } |
| 1022 | // Load a reference at base + displacement and decompress into register. |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1023 | virtual LIR* LoadRefDisp(RegStorage r_base, int displacement, RegStorage r_dest, |
| 1024 | VolatileKind is_volatile) { |
| 1025 | return LoadBaseDisp(r_base, displacement, r_dest, kReference, is_volatile); |
| 1026 | } |
| 1027 | // Load a reference at base + index and decompress into register. |
Matteo Franchin | 255e014 | 2014-07-04 13:50:41 +0100 | [diff] [blame] | 1028 | virtual LIR* LoadRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, |
| 1029 | int scale) { |
| 1030 | return LoadBaseIndexed(r_base, r_index, r_dest, scale, kReference); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1031 | } |
| 1032 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1033 | virtual RegLocation LoadValue(RegLocation rl_src, RegisterClass op_kind); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 1034 | // Same as above, but derive the target register class from the location record. |
| 1035 | virtual RegLocation LoadValue(RegLocation rl_src); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1036 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1037 | virtual RegLocation LoadValueWide(RegLocation rl_src, RegisterClass op_kind); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1038 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1039 | virtual void LoadValueDirect(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1040 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1041 | virtual void LoadValueDirectFixed(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1042 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1043 | virtual void LoadValueDirectWide(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1044 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1045 | virtual void LoadValueDirectWideFixed(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1046 | // Store an item of natural word size. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1047 | virtual LIR* StoreWordDisp(RegStorage r_base, int displacement, RegStorage r_src) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1048 | return StoreBaseDisp(r_base, displacement, r_src, kWord, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1049 | } |
| 1050 | // Store an uncompressed reference into a compressed 32-bit container. |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1051 | virtual LIR* StoreRefDisp(RegStorage r_base, int displacement, RegStorage r_src, |
| 1052 | VolatileKind is_volatile) { |
| 1053 | return StoreBaseDisp(r_base, displacement, r_src, kReference, is_volatile); |
| 1054 | } |
| 1055 | // Store an uncompressed reference into a compressed 32-bit container by index. |
Matteo Franchin | 255e014 | 2014-07-04 13:50:41 +0100 | [diff] [blame] | 1056 | virtual LIR* StoreRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, |
| 1057 | int scale) { |
| 1058 | return StoreBaseIndexed(r_base, r_index, r_src, scale, kReference); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1059 | } |
| 1060 | // Store 32 bits, regardless of target. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1061 | virtual LIR* Store32Disp(RegStorage r_base, int displacement, RegStorage r_src) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1062 | return StoreBaseDisp(r_base, displacement, r_src, k32, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1063 | } |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1064 | |
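    /*
     * Illustrative sketch of the displacement helpers above; obj_reg, field_offset and
     * other_offset are assumed names, and AllocTemp() is the temp allocator declared
     * elsewhere in this class.
     *
     *   RegStorage val_reg = AllocTemp();
     *   LoadRefDisp(obj_reg, field_offset, val_reg, kNotVolatile);  // Load and decompress a ref.
     *   Store32Disp(obj_reg, other_offset, val_reg);                // Plain 32-bit store.
     */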
| 1065 | /** |
| 1066 | * @brief Used to do the final store in the destination as per bytecode semantics. |
| 1067 | * @param rl_dest The destination dalvik register location. |
| 1068 | * @param rl_src The source register location. Can be either physical register or dalvik register. |
| 1069 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1070 | virtual void StoreValue(RegLocation rl_dest, RegLocation rl_src); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1071 | |
| 1072 | /** |
| 1073 | * @brief Used to do the final store in a wide destination as per bytecode semantics. |
| 1074 | * @see StoreValue |
| 1075 | * @param rl_dest The destination dalvik register location. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1076 | * @param rl_src The source register location. Can be either physical register or dalvik |
| 1077 | * register. |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1078 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1079 | virtual void StoreValueWide(RegLocation rl_dest, RegLocation rl_src); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1080 | |
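    /*
     * Illustrative sketch of the wide (long/double) path, which mirrors the narrow one using
     * the *Wide variants above. The exact sequence is an assumption for illustration.
     *
     *   rl_src1 = LoadValueWide(rl_src1, kCoreReg);
     *   rl_src2 = LoadValueWide(rl_src2, kCoreReg);
     *   RegLocation rl_result = EvalLocWide(rl_dest, kCoreReg, true);
     *   OpRegRegReg(kOpAdd, rl_result.reg, rl_src1.reg, rl_src2.reg);
     *   StoreValueWide(rl_dest, rl_result);
     */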
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1081 | /** |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 1082 | * @brief Used to do the final store to a destination as per bytecode semantics. |
| 1083 | * @see StoreValue |
| 1084 | * @param rl_dest The destination dalvik register location. |
| 1085 | * @param rl_src The source register location. It must be kLocPhysReg. |
| 1086 | * |
| 1087 | * This is used for x86 two operand computations, where we have computed the correct |
| 1088 | * register value that now needs to be properly registered. This is used to avoid an |
| 1089 | * extra register copy that would result if StoreValue was called. |
| 1090 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1091 | virtual void StoreFinalValue(RegLocation rl_dest, RegLocation rl_src); |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 1092 | |
| 1093 | /** |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1094 | * @brief Used to do the final store in a wide destination as per bytecode semantics. |
| 1095 | * @see StoreValueWide |
| 1096 | * @param rl_dest The destination dalvik register location. |
| 1097 | * @param rl_src The source register location. It must be kLocPhysReg. |
| 1098 | * |
| 1099 | * This is used for x86 two operand computations, where we have computed the correct |
| 1100 | * register values that now need to be properly registered. This is used to avoid an |
| 1101 | * extra pair of register copies that would result if StoreValueWide was called. |
| 1102 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1103 | virtual void StoreFinalValueWide(RegLocation rl_dest, RegLocation rl_src); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1104 | |
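    /*
     * Illustrative sketch of the two-operand pattern StoreFinalValue*() is meant for: the
     * result is computed in place in rl_result's register, so only the location needs to be
     * recorded, with no extra copy. ForceTemp() is declared further below; the sequence is
     * an assumed example.
     *
     *   RegLocation rl_result = ForceTemp(LoadValue(rl_src1, kCoreReg));
     *   rl_src2 = LoadValue(rl_src2, kCoreReg);
     *   OpRegReg(kOpAdd, rl_result.reg, rl_src2.reg);   // Two-operand add: result += src2.
     *   StoreFinalValue(rl_dest, rl_result);
     */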
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1105 | // Shared by all targets - implemented in mir_to_lir.cc. |
| 1106 | void CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1107 | virtual void HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1108 | bool MethodBlockCodeGen(BasicBlock* bb); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1109 | bool SpecialMIR2LIR(const InlineMethod& special); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1110 | virtual void MethodMIR2LIR(); |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1111 | // Update LIR for verbose listings. |
| 1112 | void UpdateLIROffsets(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1113 | |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1114 | /* |
| 1115 | * @brief Load the code address of the dex method into the register. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1116 | * @param target_method The MethodReference of the method to be invoked. |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1117 | * @param type How the method will be invoked. |
| 1118 | * @param symbolic_reg Symbolic register that will contain the code address. |
| 1119 | * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1120 | */ |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1121 | void LoadCodeAddress(const MethodReference& target_method, InvokeType type, |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1122 | SpecialTargetRegister symbolic_reg); |
| 1123 | |
| 1124 | /* |
| 1125 | * @brief Load the Method* of a dex method into the register. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1126 | * @param target_method The MethodReference of the method to be invoked. |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1127 | * @param type How the method will be invoked. |
| 1128 | * @param symbolic_reg Symbolic register that will contain the Method* address. |
| 1129 | * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1130 | */ |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1131 | virtual void LoadMethodAddress(const MethodReference& target_method, InvokeType type, |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1132 | SpecialTargetRegister symbolic_reg); |
| 1133 | |
| 1134 | /* |
| 1135 | * @brief Load the Class* of a Dex Class type into the register. |
| 1136 | * @param type_idx The dex index of the class type to load. |
| 1137 | * @param symbolic_reg Symbolic register that will contain the Class* address. |
| 1138 | * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1139 | */ |
| 1140 | virtual void LoadClassType(uint32_t type_idx, SpecialTargetRegister symbolic_reg); |
| 1141 | |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1142 | // Routines that work for the generic case, but may be overridden by the target. |
| 1143 | /* |
| 1144 | * @brief Compare memory to immediate, and branch if the condition is true. |
| 1145 | * @param cond The condition code that when true will branch to the target. |
| 1146 | * @param temp_reg A temporary register that can be used if compare to memory is not |
| 1147 | * supported by the architecture. |
| 1148 | * @param base_reg The register holding the base address. |
| 1149 | * @param offset The offset from the base. |
| 1150 | * @param check_value The immediate to compare to. |
Dave Allison | 7fb36de | 2014-07-10 02:05:10 +0000 | [diff] [blame^] | 1151 | * @param target branch target (or nullptr) |
| 1152 | * @param compare output for getting LIR for comparison (or nullptr) |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1153 | * @returns The branch instruction that was generated. |
| 1154 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1155 | virtual LIR* OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg, |
Dave Allison | 7fb36de | 2014-07-10 02:05:10 +0000 | [diff] [blame^] | 1156 | int offset, int check_value, LIR* target, LIR** compare); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1157 | |
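    /*
     * Illustrative sketch of OpCmpMemImmBranch(): branch to a slow path when a memory word
     * differs from an expected immediate. obj_reg, offset and slow_path_label are assumed
     * names; AllocTemp() supplies the scratch register for targets that cannot compare
     * memory directly.
     *
     *   LIR* compare_inst = nullptr;
     *   LIR* branch = OpCmpMemImmBranch(kCondNe, AllocTemp(), obj_reg, offset, 0,
     *                                   slow_path_label, &compare_inst);
     */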
| 1158 | // Required for target - codegen helpers. |
buzbee | 11b63d1 | 2013-08-27 07:34:17 -0700 | [diff] [blame] | 1159 | virtual bool SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1160 | RegLocation rl_src, RegLocation rl_dest, int lit) = 0; |
Ian Rogers | e2143c0 | 2014-03-28 08:47:16 -0700 | [diff] [blame] | 1161 | virtual bool EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) = 0; |
Dave Allison | b373e09 | 2014-02-20 16:06:36 -0800 | [diff] [blame] | 1162 | virtual LIR* CheckSuspendUsingLoad() = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1163 | |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1164 | virtual RegStorage LoadHelper(ThreadOffset<4> offset) = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1165 | virtual RegStorage LoadHelper(ThreadOffset<8> offset) = 0; |
| 1166 | |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 1167 | virtual LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest, |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1168 | OpSize size, VolatileKind is_volatile) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1169 | virtual LIR* LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, |
| 1170 | int scale, OpSize size) = 0; |
| 1171 | virtual LIR* LoadBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 1172 | int displacement, RegStorage r_dest, OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1173 | virtual LIR* LoadConstantNoClobber(RegStorage r_dest, int value) = 0; |
| 1174 | virtual LIR* LoadConstantWide(RegStorage r_dest, int64_t value) = 0; |
| 1175 | virtual LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src, |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1176 | OpSize size, VolatileKind is_volatile) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1177 | virtual LIR* StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, |
| 1178 | int scale, OpSize size) = 0; |
| 1179 | virtual LIR* StoreBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 1180 | int displacement, RegStorage r_src, OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1181 | virtual void MarkGCCard(RegStorage val_reg, RegStorage tgt_addr_reg) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1182 | |
| 1183 | // Required for target - register utilities. |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1184 | |
buzbee | b5860fb | 2014-06-21 15:31:01 -0700 | [diff] [blame] | 1185 | bool IsSameReg(RegStorage reg1, RegStorage reg2) { |
| 1186 | RegisterInfo* info1 = GetRegInfo(reg1); |
| 1187 | RegisterInfo* info2 = GetRegInfo(reg2); |
| 1188 | return (info1->Master() == info2->Master() && |
| 1189 | (info1->StorageMask() & info2->StorageMask()) != 0); |
| 1190 | } |
| 1191 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1192 | /** |
| 1193 | * @brief Portable way of getting special registers from the backend. |
| 1194 | * @param reg Enumeration describing the purpose of the register. |
| 1195 | * @return Return the #RegStorage corresponding to the given purpose @p reg. |
| 1196 | * @note This function is currently allowed to return any suitable view of the registers |
| 1197 | * (e.g. this could be 64-bit solo or 32-bit solo for 64-bit backends). |
| 1198 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1199 | virtual RegStorage TargetReg(SpecialTargetRegister reg) = 0; |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1200 | |
| 1201 | /** |
| 1202 | * @brief Portable way of getting special registers from the backend. |
| 1203 | * @param reg Enumeration describing the purpose of the register. |
| 1204 | * @param is_wide Whether the view should be 64-bit (rather than 32-bit). |
| 1205 | * @return Return the #RegStorage corresponding to the given purpose @p reg. |
| 1206 | */ |
| 1207 | virtual RegStorage TargetReg(SpecialTargetRegister reg, bool is_wide) { |
| 1208 | return TargetReg(reg); |
| 1209 | } |
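
    /*
     * Illustrative sketch (assumed usage): a 64-bit backend can hand out the wide view of
     * the same special register when the value involved is wide.
     *
     *   RegStorage ret_reg = TargetReg(kRet0, rl_dest.wide);  // 64-bit view iff rl_dest is wide.
     */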
| 1210 | |
| 1211 | /** |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 1212 | * @brief Portable way of getting a special register pair from the backend. |
| 1213 | * @param reg1 Enumeration describing the purpose of the first register. |
| 1214 | * @param reg2 Enumeration describing the purpose of the second register. |
| 1215 | * @return Return the #RegStorage pair corresponding to the given purposes @p reg1 and @p reg2. |
| 1216 | */ |
| 1217 | virtual RegStorage TargetReg(SpecialTargetRegister reg1, SpecialTargetRegister reg2) { |
| 1218 | return RegStorage::MakeRegPair(TargetReg(reg1, false), TargetReg(reg2, false)); |
| 1219 | } |
| 1220 | |
| 1221 | /** |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1222 | * @brief Portable way of getting a special register for storing a reference. |
| 1223 | * @see TargetReg() |
| 1224 | */ |
| 1225 | virtual RegStorage TargetRefReg(SpecialTargetRegister reg) { |
| 1226 | return TargetReg(reg); |
| 1227 | } |
| 1228 | |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 1229 | /** |
| 1230 | * @brief Portable way of getting a special register for storing a pointer. |
| 1231 | * @see TargetReg() |
| 1232 | */ |
| 1233 | virtual RegStorage TargetPtrReg(SpecialTargetRegister reg) { |
| 1234 | return TargetReg(reg); |
| 1235 | } |
| 1236 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1237 | // Get a reg storage corresponding to the wide & ref flags of the reg location. |
| 1238 | virtual RegStorage TargetReg(SpecialTargetRegister reg, RegLocation loc) { |
| 1239 | if (loc.ref) { |
| 1240 | return TargetRefReg(reg); |
| 1241 | } else { |
| 1242 | return TargetReg(reg, loc.wide); |
| 1243 | } |
| 1244 | } |
| 1245 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1246 | virtual RegStorage GetArgMappingToPhysicalReg(int arg_num) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1247 | virtual RegLocation GetReturnAlt() = 0; |
| 1248 | virtual RegLocation GetReturnWideAlt() = 0; |
| 1249 | virtual RegLocation LocCReturn() = 0; |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 1250 | virtual RegLocation LocCReturnRef() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1251 | virtual RegLocation LocCReturnDouble() = 0; |
| 1252 | virtual RegLocation LocCReturnFloat() = 0; |
| 1253 | virtual RegLocation LocCReturnWide() = 0; |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1254 | virtual ResourceMask GetRegMaskCommon(const RegStorage& reg) const = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1255 | virtual void AdjustSpillMask() = 0; |
Vladimir Marko | 31c2aac | 2013-12-09 16:31:19 +0000 | [diff] [blame] | 1256 | virtual void ClobberCallerSave() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1257 | virtual void FreeCallTemps() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1258 | virtual void LockCallTemps() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1259 | virtual void CompilerInitializeRegAlloc() = 0; |
| 1260 | |
| 1261 | // Required for target - miscellaneous. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1262 | virtual void AssembleLIR() = 0; |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1263 | virtual void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) = 0; |
| 1264 | virtual void SetupTargetResourceMasks(LIR* lir, uint64_t flags, |
| 1265 | ResourceMask* use_mask, ResourceMask* def_mask) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1266 | virtual const char* GetTargetInstFmt(int opcode) = 0; |
| 1267 | virtual const char* GetTargetInstName(int opcode) = 0; |
| 1268 | virtual std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) = 0; |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1269 | virtual ResourceMask GetPCUseDefEncoding() const = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1270 | virtual uint64_t GetTargetInstFlags(int opcode) = 0; |
Ian Rogers | 5aa6e04 | 2014-06-13 16:38:24 -0700 | [diff] [blame] | 1271 | virtual size_t GetInsnSize(LIR* lir) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1272 | virtual bool IsUnconditionalBranch(LIR* lir) = 0; |
| 1273 | |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 1274 | // Check support for volatile load/store of a given size. |
| 1275 | virtual bool SupportsVolatileLoadStore(OpSize size) = 0; |
| 1276 | // Get the register class for load/store of a field. |
| 1277 | virtual RegisterClass RegClassForFieldLoadStore(OpSize size, bool is_volatile) = 0; |
| 1278 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1279 | // Required for target - Dalvik-level generators. |
| 1280 | virtual void GenArithImmOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 1281 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1282 | virtual void GenMulLong(Instruction::Code, |
| 1283 | RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1284 | RegLocation rl_src2) = 0; |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1285 | virtual void GenAddLong(Instruction::Code, |
| 1286 | RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1287 | RegLocation rl_src2) = 0; |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1288 | virtual void GenAndLong(Instruction::Code, |
| 1289 | RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1290 | RegLocation rl_src2) = 0; |
| 1291 | virtual void GenArithOpDouble(Instruction::Code opcode, |
| 1292 | RegLocation rl_dest, RegLocation rl_src1, |
| 1293 | RegLocation rl_src2) = 0; |
| 1294 | virtual void GenArithOpFloat(Instruction::Code opcode, RegLocation rl_dest, |
| 1295 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
| 1296 | virtual void GenCmpFP(Instruction::Code opcode, RegLocation rl_dest, |
| 1297 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
| 1298 | virtual void GenConversion(Instruction::Code opcode, RegLocation rl_dest, |
| 1299 | RegLocation rl_src) = 0; |
Vladimir Marko | 1c282e2 | 2013-11-21 14:49:47 +0000 | [diff] [blame] | 1300 | virtual bool GenInlinedCas(CallInfo* info, bool is_long, bool is_object) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1301 | |
| 1302 | /** |
| 1303 | * @brief Used to generate code for intrinsic java\.lang\.Math methods min and max. |
| 1304 | * @details This is also applicable to java\.lang\.StrictMath since it is a simple algorithm |
| 1305 | * that applies to integers. The generated code will write the smallest or largest value |
| 1306 | * directly into the destination register as specified by the invoke information. |
| 1307 | * @param info Information about the invoke. |
| 1308 | * @param is_min If true generates code that computes minimum. Otherwise computes maximum. |
Serban Constantinescu | 23abec9 | 2014-07-02 16:13:38 +0100 | [diff] [blame] | 1309 | * @param is_long If true the value is a Long. Otherwise the value is an Int. |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1310 | * @return Returns true if successfully generated |
| 1311 | */ |
Serban Constantinescu | 23abec9 | 2014-07-02 16:13:38 +0100 | [diff] [blame] | 1312 | virtual bool GenInlinedMinMax(CallInfo* info, bool is_min, bool is_long) = 0; |
| 1313 | virtual bool GenInlinedMinMaxFP(CallInfo* info, bool is_min, bool is_double); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1314 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1315 | virtual bool GenInlinedSqrt(CallInfo* info) = 0; |
Vladimir Marko | e508a20 | 2013-11-04 15:24:22 +0000 | [diff] [blame] | 1316 | virtual bool GenInlinedPeek(CallInfo* info, OpSize size) = 0; |
| 1317 | virtual bool GenInlinedPoke(CallInfo* info, OpSize size) = 0; |
Serban Constantinescu | ed65c5e | 2014-05-22 15:10:18 +0100 | [diff] [blame] | 1318 | virtual void GenNotLong(RegLocation rl_dest, RegLocation rl_src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1319 | virtual void GenNegLong(RegLocation rl_dest, RegLocation rl_src) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1320 | virtual void GenOrLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1321 | RegLocation rl_src2) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1322 | virtual void GenSubLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1323 | RegLocation rl_src2) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1324 | virtual void GenXorLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1325 | RegLocation rl_src2) = 0; |
Serban Constantinescu | ed65c5e | 2014-05-22 15:10:18 +0100 | [diff] [blame] | 1326 | virtual void GenDivRemLong(Instruction::Code, RegLocation rl_dest, RegLocation rl_src1, |
| 1327 | RegLocation rl_src2, bool is_div) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1328 | virtual RegLocation GenDivRem(RegLocation rl_dest, RegStorage reg_lo, RegStorage reg_hi, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1329 | bool is_div) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1330 | virtual RegLocation GenDivRemLit(RegLocation rl_dest, RegStorage reg_lo, int lit, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1331 | bool is_div) = 0; |
Mark Mendell | 2bf31e6 | 2014-01-23 12:13:40 -0800 | [diff] [blame] | 1332 | /* |
| 1333 | * @brief Generate an integer div or rem operation. |
| 1334 | * @param rl_dest Destination Location. |
| 1335 | * @param rl_src1 Numerator Location. |
| 1336 | * @param rl_src2 Divisor Location. |
| 1337 | * @param is_div 'true' if this is a division, 'false' for a remainder. |
| 1338 | * @param check_zero 'true' if an exception should be generated if the divisor is 0. |
| 1339 | */ |
| 1340 | virtual RegLocation GenDivRem(RegLocation rl_dest, RegLocation rl_src1, |
| 1341 | RegLocation rl_src2, bool is_div, bool check_zero) = 0; |
| 1342 | /* |
| 1343 | * @brief Generate an integer div or rem operation by a literal. |
| 1344 | * @param rl_dest Destination Location. |
| 1345 | * @param rl_src1 Numerator Location. |
| 1346 | * @param lit Divisor. |
| 1347 | * @param is_div 'true' if this is a division, 'false' for a remainder. |
| 1348 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1349 | virtual RegLocation GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit, |
| 1350 | bool is_div) = 0; |
| 1351 | virtual void GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) = 0; |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1352 | |
| 1353 | /** |
| 1354 | * @brief Used for generating code that throws ArithmeticException if the wide divisor is zero. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1355 | * @details This is used for generating DivideByZero checks when the divisor is held in two |
| 1356 | * separate registers. |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 1357 | * @param reg The register holding the pair of 32-bit values. |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1358 | */ |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 1359 | virtual void GenDivZeroCheckWide(RegStorage reg) = 0; |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1360 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1361 | virtual void GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1362 | virtual void GenExitSequence() = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1363 | virtual void GenFillArrayData(DexOffset table_offset, RegLocation rl_src) = 0; |
| 1364 | virtual void GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias, bool is_double) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1365 | virtual void GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir) = 0; |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1366 | |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1367 | /* |
| 1368 | * @brief Handle Machine Specific MIR Extended opcodes. |
| 1369 | * @param bb The basic block the MIR comes from. |
| 1370 | * @param mir The MIR whose opcode is not a standard extended MIR. |
| 1371 | * @note Base class implementation will abort for unknown opcodes. |
| 1372 | */ |
| 1373 | virtual void GenMachineSpecificExtendedMethodMIR(BasicBlock* bb, MIR* mir); |
| 1374 | |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1375 | /** |
| 1376 | * @brief Lowers the kMirOpSelect MIR into LIR. |
| 1377 | * @param bb The basic block the MIR comes from. |
| 1378 | * @param mir The MIR whose opcode is kMirOpSelect. |
| 1379 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1380 | virtual void GenSelect(BasicBlock* bb, MIR* mir) = 0; |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1381 | |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1382 | /** |
| 1383 | * @brief Used to generate a memory barrier in an architecture specific way. |
| 1384 | * @details The last generated LIR will be considered for use as the barrier. Namely, |
| 1385 | * if the last LIR can be updated in a way that lets it serve the semantics of the |
| 1386 | * barrier, then it will be used as such. Otherwise, a new LIR will be generated |
| 1387 | * that provides the required semantics. |
| 1388 | * @param barrier_kind The kind of memory barrier to generate. |
Andreas Gampe | b14329f | 2014-05-15 11:16:06 -0700 | [diff] [blame] | 1389 | * @return whether a new instruction was generated. |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1390 | */ |
Andreas Gampe | b14329f | 2014-05-15 11:16:06 -0700 | [diff] [blame] | 1391 | virtual bool GenMemBarrier(MemBarrierKind barrier_kind) = 0; |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1392 | |
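    /*
     * Illustrative sketch (assumed usage and barrier kinds): a volatile store is typically
     * bracketed by barriers, letting the backend fold the barrier into the store when it can.
     *
     *   GenMemBarrier(kStoreStore);                                     // Before the store.
     *   StoreBaseDisp(r_base, displacement, r_src, size, kNotVolatile);
     *   GenMemBarrier(kStoreLoad);                                      // After the store.
     */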
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1393 | virtual void GenMoveException(RegLocation rl_dest) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1394 | virtual void GenMultiplyByTwoBitMultiplier(RegLocation rl_src, RegLocation rl_result, int lit, |
| 1395 | int first_bit, int second_bit) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1396 | virtual void GenNegDouble(RegLocation rl_dest, RegLocation rl_src) = 0; |
| 1397 | virtual void GenNegFloat(RegLocation rl_dest, RegLocation rl_src) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1398 | virtual void GenPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) = 0; |
| 1399 | virtual void GenSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1400 | virtual void GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array, |
| 1401 | RegLocation rl_index, RegLocation rl_dest, int scale) = 0; |
| 1402 | virtual void GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array, |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 1403 | RegLocation rl_index, RegLocation rl_src, int scale, |
| 1404 | bool card_mark) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1405 | virtual void GenShiftImmOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 1406 | RegLocation rl_src1, RegLocation rl_shift) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1407 | |
| 1408 | // Required for target - single operation generators. |
| 1409 | virtual LIR* OpUnconditionalBranch(LIR* target) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1410 | virtual LIR* OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target) = 0; |
| 1411 | virtual LIR* OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, |
| 1412 | LIR* target) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1413 | virtual LIR* OpCondBranch(ConditionCode cc, LIR* target) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1414 | virtual LIR* OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target) = 0; |
| 1415 | virtual LIR* OpFpRegCopy(RegStorage r_dest, RegStorage r_src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1416 | virtual LIR* OpIT(ConditionCode cond, const char* guide) = 0; |
Dave Allison | 3da67a5 | 2014-04-02 17:03:45 -0700 | [diff] [blame] | 1417 | virtual void OpEndIT(LIR* it) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1418 | virtual LIR* OpMem(OpKind op, RegStorage r_base, int disp) = 0; |
| 1419 | virtual LIR* OpPcRelLoad(RegStorage reg, LIR* target) = 0; |
| 1420 | virtual LIR* OpReg(OpKind op, RegStorage r_dest_src) = 0; |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1421 | virtual void OpRegCopy(RegStorage r_dest, RegStorage r_src) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1422 | virtual LIR* OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src) = 0; |
| 1423 | virtual LIR* OpRegImm(OpKind op, RegStorage r_dest_src1, int value) = 0; |
| 1424 | virtual LIR* OpRegMem(OpKind op, RegStorage r_dest, RegStorage r_base, int offset) = 0; |
| 1425 | virtual LIR* OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1426 | |
| 1427 | /** |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1428 | * @brief Used to generate an LIR that does a load from mem to reg. |
| 1429 | * @param r_dest The destination physical register. |
| 1430 | * @param r_base The base physical register for memory operand. |
| 1431 | * @param offset The displacement for memory operand. |
| 1432 | * @param move_type Specification on the move desired (size, alignment, register kind). |
| 1433 | * @return Returns the generated move LIR. |
| 1434 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1435 | virtual LIR* OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, |
| 1436 | MoveType move_type) = 0; |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1437 | |
| 1438 | /** |
| 1439 | * @brief Used to generate an LIR that does a store from reg to mem. |
| 1440 | * @param r_base The base physical register for memory operand. |
| 1441 | * @param offset The displacement for memory operand. |
| 1442 | * @param r_src The source physical register. |
| 1443 | * @param move_type Specification on the move desired (size, alignment, |
| 1444 | * register kind). |
| 1445 | * @return Returns the generated move LIR. |
| 1446 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1447 | virtual LIR* OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, |
| 1448 | MoveType move_type) = 0; |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1449 | |
| 1450 | /** |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1451 | * @brief Used for generating a conditional register to register operation. |
| 1452 | * @param op The opcode kind. |
| 1453 | * @param cc The condition code that when true will perform the opcode. |
| 1454 | * @param r_dest The destination physical register. |
| 1455 | * @param r_src The source physical register. |
| 1456 | * @return Returns the newly created LIR or null in case of creation failure. |
| 1457 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1458 | virtual LIR* OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1459 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1460 | virtual LIR* OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value) = 0; |
| 1461 | virtual LIR* OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, |
| 1462 | RegStorage r_src2) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1463 | virtual LIR* OpTestSuspend(LIR* target) = 0; |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1464 | virtual LIR* OpThreadMem(OpKind op, ThreadOffset<4> thread_offset) = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1465 | virtual LIR* OpThreadMem(OpKind op, ThreadOffset<8> thread_offset) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1466 | virtual LIR* OpVldm(RegStorage r_base, int count) = 0; |
| 1467 | virtual LIR* OpVstm(RegStorage r_base, int count) = 0; |
| 1468 | virtual void OpLea(RegStorage r_base, RegStorage reg1, RegStorage reg2, int scale, |
| 1469 | int offset) = 0; |
| 1470 | virtual void OpRegCopyWide(RegStorage dest, RegStorage src) = 0; |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1471 | virtual void OpTlsCmp(ThreadOffset<4> offset, int val) = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1472 | virtual void OpTlsCmp(ThreadOffset<8> offset, int val) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1473 | virtual bool InexpensiveConstantInt(int32_t value) = 0; |
| 1474 | virtual bool InexpensiveConstantFloat(int32_t value) = 0; |
| 1475 | virtual bool InexpensiveConstantLong(int64_t value) = 0; |
| 1476 | virtual bool InexpensiveConstantDouble(int64_t value) = 0; |
| 1477 | |
Ian Rogers | d9c4fc9 | 2013-10-01 19:45:43 -0700 | [diff] [blame] | 1478 | // May be optimized by targets. |
| 1479 | virtual void GenMonitorEnter(int opt_flags, RegLocation rl_src); |
| 1480 | virtual void GenMonitorExit(int opt_flags, RegLocation rl_src); |
| 1481 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1482 | // Temp workaround |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1483 | void Workaround7250540(RegLocation rl_dest, RegStorage zero_reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1484 | |
| 1485 | protected: |
| 1486 | Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena); |
| 1487 | |
| 1488 | CompilationUnit* GetCompilationUnit() { |
| 1489 | return cu_; |
| 1490 | } |
Mark Mendell | 4708dcd | 2014-01-22 09:05:18 -0800 | [diff] [blame] | 1491 | /* |
| 1492 | * @brief Returns the index of the lowest set bit in 'x'. |
| 1493 | * @param x Value to be examined. |
| 1494 | * @returns The bit number of the lowest bit set in the value. |
| 1495 | */ |
| 1496 | int32_t LowestSetBit(uint64_t x); |
| 1497 | /* |
| 1498 | * @brief Is this value a power of two? |
| 1499 | * @param x Value to be examined. |
| 1500 | * @returns 'true' if only 1 bit is set in the value. |
| 1501 | */ |
| 1502 | bool IsPowerOfTwo(uint64_t x); |
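/*
 * A minimal illustrative sketch of the conventional implementations of the two helpers above
 * (the real definitions live elsewhere in the compiler; the *Sketch names are hypothetical).
 */
static int32_t LowestSetBitSketch(uint64_t x) {
  // Linear scan from bit 0; callers are expected to pass a non-zero value.
  int32_t bit_posn = 0;
  while (x != 0 && (x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}
static bool IsPowerOfTwoSketch(uint64_t x) {
  // Exactly one bit set: clearing the lowest set bit must leave zero.
  return (x != 0) && ((x & (x - 1)) == 0);
}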
| 1503 | /* |
| 1504 | * @brief Do these symbolic register (SR) pairs overlap? |
| 1505 | * @param rl_op1 One RegLocation |
| 1506 | * @param rl_op2 The other RegLocation |
| 1507 | * @return 'true' if the virtual register pairs overlap |
| 1508 | * |
| 1509 | * Check whether a result pair has a misaligned overlap with an operand pair. This |
| 1510 | * is not something dx usually generates, but it is legal (for now). In a future |
| 1511 | * revision of the dex format, we'll want to make this case illegal. |
| 1512 | */ |
| 1513 | bool BadOverlap(RegLocation rl_op1, RegLocation rl_op2); |
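// A hedged sketch of the overlap test, assuming MIRGraph::SRegToVReg() maps an SSA name to its
// virtual register number. The helper name is hypothetical; the real check is defined elsewhere.
bool BadOverlapSketch(RegLocation rl_op1, RegLocation rl_op2) {
  // Two wide pairs overlap badly when their starting virtual registers are exactly one apart,
  // so the pairs share a single VR slot.
  int vreg1 = mir_graph_->SRegToVReg(rl_op1.s_reg_low);
  int vreg2 = mir_graph_->SRegToVReg(rl_op2.s_reg_low);
  return (vreg1 > vreg2 ? vreg1 - vreg2 : vreg2 - vreg1) == 1;
}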
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1514 | |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1515 | /* |
| 1516 | * @brief Force a location (in a register) into a temporary register |
| 1517 | * @param loc location of result |
| 1518 | * @returns updated location |
| 1519 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1520 | virtual RegLocation ForceTemp(RegLocation loc); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1521 | |
| 1522 | /* |
| 1523 | * @brief Force a wide location (in registers) into temporary registers |
| 1524 | * @param loc location of result |
| 1525 | * @returns updated location |
| 1526 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1527 | virtual RegLocation ForceTempWide(RegLocation loc); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1528 | |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 1529 | static constexpr OpSize LoadStoreOpSize(bool wide, bool ref) { |
| 1530 | return wide ? k64 : ref ? kReference : k32; |
| 1531 | } |
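// A hedged usage sketch of LoadStoreOpSize: selecting the memory operation width for a
// RegLocation from its 'wide' and 'ref' flags. The helper name is hypothetical.
static OpSize OpSizeForLocSketch(RegLocation loc) {
  return LoadStoreOpSize(loc.wide, loc.ref);  // k64 for wide values, kReference for refs, else k32.
}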
| 1532 | |
Mark Mendell | df8ee2e | 2014-01-27 16:37:47 -0800 | [diff] [blame] | 1533 | virtual void GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, |
| 1534 | RegLocation rl_dest, RegLocation rl_src); |
| 1535 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1536 | void AddSlowPath(LIRSlowPath* slowpath); |
| 1537 | |
Mark Mendell | 6607d97 | 2014-02-10 06:54:18 -0800 | [diff] [blame] | 1538 | virtual void GenInstanceofCallingHelper(bool needs_access_check, bool type_known_final, |
| 1539 | bool type_known_abstract, bool use_declaring_class, |
| 1540 | bool can_assume_type_is_in_dex_cache, |
| 1541 | uint32_t type_idx, RegLocation rl_dest, |
| 1542 | RegLocation rl_src); |
Mark Mendell | ae9fd93 | 2014-02-10 16:14:35 -0800 | [diff] [blame] | 1543 | /* |
| 1544 | * @brief Generate the debug_frame FDE information if possible. |
| 1545 | * @returns pointer to vector containing CFI (call frame) information, or NULL. |
| 1546 | */ |
| 1547 | virtual std::vector<uint8_t>* ReturnCallFrameInformation(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1548 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1549 | /** |
| 1550 | * @brief Used to insert marker that can be used to associate MIR with LIR. |
| 1551 | * @details Only inserts marker if verbosity is enabled. |
| 1552 | * @param mir The mir that is currently being generated. |
| 1553 | */ |
| 1554 | void GenPrintLabel(MIR* mir); |
| 1555 | |
| 1556 | /** |
| 1557 | * @brief Used to generate return sequence when there is no frame. |
| 1558 | * @details Assumes that the return registers have already been populated. |
| 1559 | */ |
| 1560 | virtual void GenSpecialExitSequence() = 0; |
| 1561 | |
| 1562 | /** |
| 1563 | * @brief Used to generate code for special methods that are known to be |
| 1564 | * small enough to work in frameless mode. |
| 1565 | * @param bb The basic block of the first MIR. |
| 1566 | * @param mir The first MIR of the special method. |
| 1567 | * @param special Information about the special method. |
| 1568 | * @return Returns whether or not this was handled successfully. Returns false |
| 1569 | * if the caller should punt to normal MIR-to-LIR conversion. |
| 1570 | */ |
| 1571 | virtual bool GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special); |
| 1572 | |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1573 | protected: |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1574 | void ClobberBody(RegisterInfo* p); |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1575 | void SetCurrentDexPc(DexOffset dexpc) { |
| 1576 | current_dalvik_offset_ = dexpc; |
| 1577 | } |
| 1578 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1579 | /** |
| 1580 | * @brief Used to lock the register holding the argument at in_position, if it was passed in a register. |
| 1581 | * @details Does nothing if the argument is passed via the stack. |
| 1582 | * @param in_position The argument number whose register to lock. |
| 1583 | * @param wide Whether the argument is wide. |
| 1584 | */ |
| 1585 | void LockArg(int in_position, bool wide = false); |
| 1586 | |
| 1587 | /** |
| 1588 | * @brief Used to load a VR argument into a physical register. |
| 1589 | * @details The load is only done if the argument is not already in a physical register. |
| 1590 | * LockArg must have been previously called. |
| 1591 | * @param in_position The argument number to load. |
| 1592 | * @param wide Whether the argument is 64-bit or not. |
| 1593 | * @return Returns the register (or register pair) for the loaded argument. |
| 1594 | */ |
Vladimir Marko | c93ac8b | 2014-05-13 17:53:49 +0100 | [diff] [blame] | 1595 | RegStorage LoadArg(int in_position, RegisterClass reg_class, bool wide = false); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1596 | |
| 1597 | /** |
| 1598 | * @brief Used to load a VR argument directly to a specified register location. |
| 1599 | * @param in_position The argument number to place in a register. |
| 1600 | * @param rl_dest The register location in which to place the argument. |
| 1601 | */ |
| 1602 | void LoadArgDirect(int in_position, RegLocation rl_dest); |
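// A hedged sketch of how the argument helpers above combine in a frameless special method:
// pin the incoming register for argument 0 and materialize it for the following field access.
// The helper name is hypothetical, and kRefReg is assumed to be the reference register class.
RegStorage LoadThisArgSketch() {
  LockArg(0);                   // Does nothing if argument 0 arrived on the stack.
  return LoadArg(0, kRefReg);   // Load argument 0 ('this') into a reference-class register.
}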
| 1603 | |
| 1604 | /** |
| 1605 | * @brief Used to generate LIR for special getter method. |
| 1606 | * @param mir The mir that represents the iget. |
| 1607 | * @param special Information about the special getter method. |
| 1608 | * @return Returns whether LIR was successfully generated. |
| 1609 | */ |
| 1610 | bool GenSpecialIGet(MIR* mir, const InlineMethod& special); |
| 1611 | |
| 1612 | /** |
| 1613 | * @brief Used to generate LIR for special setter method. |
| 1614 | * @param mir The mir that represents the iput. |
| 1615 | * @param special Information about the special setter method. |
| 1616 | * @return Returns whether LIR was successfully generated. |
| 1617 | */ |
| 1618 | bool GenSpecialIPut(MIR* mir, const InlineMethod& special); |
| 1619 | |
| 1620 | /** |
| 1621 | * @brief Used to generate LIR for special return-args method. |
| 1622 | * @param mir The mir that represents the return of argument. |
| 1623 | * @param special Information about the special return-args method. |
| 1624 | * @return Returns whether LIR was successfully generated. |
| 1625 | */ |
| 1626 | bool GenSpecialIdentity(MIR* mir, const InlineMethod& special); |
| 1627 | |
Mingyao Yang | 4289456 | 2014-04-07 12:42:16 -0700 | [diff] [blame] | 1628 | void AddDivZeroCheckSlowPath(LIR* branch); |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1629 | |
Mingyao Yang | 80365d9 | 2014-04-18 12:10:58 -0700 | [diff] [blame] | 1630 | // Copy arg0 and arg1 to kArg0 and kArg1 safely, possibly using |
| 1631 | // kArg2 as temp. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1632 | virtual void CopyToArgumentRegs(RegStorage arg0, RegStorage arg1); |
| 1633 | |
| 1634 | /** |
| 1635 | * @brief Load Constant into RegLocation |
| 1636 | * @param rl_dest Destination RegLocation |
| 1637 | * @param value Constant value |
| 1638 | */ |
| 1639 | virtual void GenConst(RegLocation rl_dest, int value); |
Mingyao Yang | 80365d9 | 2014-04-18 12:10:58 -0700 | [diff] [blame] | 1640 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1641 | enum class WidenessCheck { // private |
| 1642 | kIgnoreWide, |
| 1643 | kCheckWide, |
| 1644 | kCheckNotWide |
| 1645 | }; |
| 1646 | |
| 1647 | enum class RefCheck { // private |
| 1648 | kIgnoreRef, |
| 1649 | kCheckRef, |
| 1650 | kCheckNotRef |
| 1651 | }; |
| 1652 | |
| 1653 | enum class FPCheck { // private |
| 1654 | kIgnoreFP, |
| 1655 | kCheckFP, |
| 1656 | kCheckNotFP |
| 1657 | }; |
| 1658 | |
| 1659 | /** |
| 1660 | * Check whether a reg storage seems well-formed, that is, if the reg storage is valid, |
| 1661 | * check that it has the form requested by the given checks. |
| 1662 | * The kIgnore* values skip a check, kCheck* requires the property to hold, and kCheckNot* requires that it does not. |
| 1663 | */ |
| 1664 | void CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp, bool fail, |
| 1665 | bool report) |
| 1666 | const; |
| 1667 | |
| 1668 | /** |
| 1669 | * Check whether a reg location seems well-formed, that is, if a reg storage is encoded, |
| 1670 | * check that it has the expected size. |
| 1671 | */ |
| 1672 | void CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const; |
| 1673 | |
| 1674 | // See CheckRegStorageImpl. Will print or fail depending on kFailOnSizeError and |
| 1675 | // kReportSizeError. |
| 1676 | void CheckRegStorage(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp) const; |
| 1677 | // See CheckRegLocationImpl. |
| 1678 | void CheckRegLocation(RegLocation rl) const; |
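// A hedged usage sketch of the checks above: assert that a register chosen for a wide,
// non-reference value has the expected shape. The helper name and rs_result are illustrative.
void CheckWideNonRefSketch(RegStorage rs_result) const {
  CheckRegStorage(rs_result, WidenessCheck::kCheckWide, RefCheck::kCheckNotRef, FPCheck::kIgnoreFP);
}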
| 1679 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1680 | public: |
| 1681 | // TODO: add accessors for these. |
| 1682 | LIR* literal_list_; // Constants. |
| 1683 | LIR* method_literal_list_; // Method literals requiring patching. |
Hiroshi Yamauchi | be1ca55 | 2014-01-15 11:46:48 -0800 | [diff] [blame] | 1684 | LIR* class_literal_list_; // Class literals requiring patching. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1685 | LIR* code_literal_list_; // Code literals requiring patching. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1686 | LIR* first_fixup_; // Doubly-linked list of LIR nodes requiring fixups. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1687 | |
| 1688 | protected: |
| 1689 | CompilationUnit* const cu_; |
| 1690 | MIRGraph* const mir_graph_; |
| 1691 | GrowableArray<SwitchTable*> switch_tables_; |
| 1692 | GrowableArray<FillArrayData*> fill_array_data_; |
buzbee | bd663de | 2013-09-10 15:41:31 -0700 | [diff] [blame] | 1693 | GrowableArray<RegisterInfo*> tempreg_info_; |
| 1694 | GrowableArray<RegisterInfo*> reginfo_map_; |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1695 | GrowableArray<void*> pointer_storage_; |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1696 | CodeOffset current_code_offset_; // Working byte offset of machine instructions. |
| 1697 | CodeOffset data_offset_; // Starting offset of the literal pool. |
| 1698 | size_t total_size_; // Header + code size. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1699 | LIR* block_label_list_; |
| 1700 | PromotionMap* promotion_map_; |
| 1701 | /* |
| 1702 | * TODO: The code generation utilities don't have a built-in |
| 1703 | * mechanism to propagate the original Dalvik opcode address to the |
| 1704 | * associated generated instructions. For the trace compiler, this wasn't |
| 1705 | * necessary because the interpreter handled all throws and debugging |
| 1706 | * requests. For now we'll handle this by placing the Dalvik offset |
| 1707 | * in the CompilationUnit struct before codegen for each instruction. |
| 1708 | * The low-level LIR creation utilities will pull it from here. Rework this. |
| 1709 | */ |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1710 | DexOffset current_dalvik_offset_; |
| 1711 | size_t estimated_native_code_size_; // Just an estimate; used to reserve code_buffer_ size. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1712 | RegisterPool* reg_pool_; |
| 1713 | /* |
| 1714 | * Sanity checking for the register temp tracking. The same SSA |
| 1715 | * name should never be associated with more than one temp register |
| 1716 | * during the compilation of a single instruction. |
| 1717 | */ |
| 1718 | int live_sreg_; |
| 1719 | CodeBuffer code_buffer_; |
Ian Rogers | 96faf5b | 2013-08-09 22:05:32 -0700 | [diff] [blame] | 1720 | // The encoding mapping table data (dex -> pc offset and pc offset -> dex) with a size prefix. |
Vladimir Marko | 06606b9 | 2013-12-02 15:31:08 +0000 | [diff] [blame] | 1721 | std::vector<uint8_t> encoded_mapping_table_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1722 | std::vector<uint32_t> core_vmap_table_; |
| 1723 | std::vector<uint32_t> fp_vmap_table_; |
| 1724 | std::vector<uint8_t> native_gc_map_; |
| 1725 | int num_core_spills_; |
| 1726 | int num_fp_spills_; |
| 1727 | int frame_size_; |
| 1728 | unsigned int core_spill_mask_; |
| 1729 | unsigned int fp_spill_mask_; |
| 1730 | LIR* first_lir_insn_; |
| 1731 | LIR* last_lir_insn_; |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1732 | |
| 1733 | GrowableArray<LIRSlowPath*> slow_paths_; |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1734 | |
| 1735 | // The memory reference type for new LIRs. |
| 1736 | // NOTE: Passing this as an explicit parameter by all functions that directly or indirectly |
| 1737 | // invoke RawLIR() would clutter the code and reduce the readability. |
| 1738 | ResourceMask::ResourceBit mem_ref_type_; |
| 1739 | |
| 1740 | // Each resource mask now takes 16 bytes, so having both use/def masks directly in a LIR |
| 1741 | // would consume 32 bytes per LIR. Instead, the LIR now holds only pointers to the masks |
| 1742 | // (i.e. 8 bytes on 32-bit arch, 16 bytes on 64-bit arch) and we use ResourceMaskCache |
| 1743 | // to deduplicate the masks. |
| 1744 | ResourceMaskCache mask_cache_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1745 | }; // Class Mir2Lir |
| 1746 | |
| 1747 | } // namespace art |
| 1748 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 1749 | #endif // ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_ |