/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_H_

#include "arch/instruction_set.h"
#include "arch/instruction_set_features.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/bit_field.h"
#include "base/bit_utils.h"
#include "base/enums.h"
#include "base/globals.h"
#include "base/memory_region.h"
#include "class_root.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "graph_visualizer.h"
#include "locations.h"
#include "nodes.h"
#include "optimizing_compiler_stats.h"
#include "read_barrier_option.h"
#include "stack.h"
#include "utils/assembler.h"
#include "utils/label.h"

namespace art {

// Binary encoding of 2^32 for type double.
static int64_t constexpr k2Pow32EncodingForDouble = INT64_C(0x41F0000000000000);
// Binary encoding of 2^31 for type double.
static int64_t constexpr k2Pow31EncodingForDouble = INT64_C(0x41E0000000000000);
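// A worked reading of the encodings above (a sketch, not from the original
// comments): in the IEEE-754 double format, 0x41F0000000000000 has sign 0,
// biased exponent 0x41F (1055, i.e. unbiased 1055 - 1023 = 32) and a zero
// mantissa, so it denotes 1.0 * 2^32; 0x41E0000000000000 likewise denotes 2^31.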

// Minimum value for a primitive integer.
static int32_t constexpr kPrimIntMin = 0x80000000;
// Minimum value for a primitive long.
static int64_t constexpr kPrimLongMin = INT64_C(0x8000000000000000);

// Maximum value for a primitive integer.
static int32_t constexpr kPrimIntMax = 0x7fffffff;
// Maximum value for a primitive long.
static int64_t constexpr kPrimLongMax = INT64_C(0x7fffffffffffffff);

static constexpr ReadBarrierOption kCompilerReadBarrierOption =
    kEmitCompilerReadBarrier ? kWithReadBarrier : kWithoutReadBarrier;

class Assembler;
class CodeGenerator;
class CompilerOptions;
class StackMapStream;
class ParallelMoveResolver;

namespace linker {
class LinkerPatch;
}  // namespace linker

class CodeAllocator {
 public:
  CodeAllocator() {}
  virtual ~CodeAllocator() {}

  virtual uint8_t* Allocate(size_t size) = 0;
  virtual ArrayRef<const uint8_t> GetMemory() const = 0;

 private:
  DISALLOW_COPY_AND_ASSIGN(CodeAllocator);
};
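// A minimal sketch of a concrete allocator (illustrative only; the class name
// and the use of std::vector are assumptions, not part of this file):
//
//   class VectorCodeAllocator final : public CodeAllocator {
//    public:
//     uint8_t* Allocate(size_t size) override {
//       memory_.resize(size);    // Reserve backing storage for the code.
//       return memory_.data();   // The assembler writes into this buffer.
//     }
//     ArrayRef<const uint8_t> GetMemory() const override {
//       return ArrayRef<const uint8_t>(memory_);
//     }
//    private:
//     std::vector<uint8_t> memory_;
//   };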

class SlowPathCode : public DeletableArenaObject<kArenaAllocSlowPaths> {
 public:
  explicit SlowPathCode(HInstruction* instruction) : instruction_(instruction) {
    for (size_t i = 0; i < kMaximumNumberOfExpectedRegisters; ++i) {
      saved_core_stack_offsets_[i] = kRegisterNotSaved;
      saved_fpu_stack_offsets_[i] = kRegisterNotSaved;
    }
  }

  virtual ~SlowPathCode() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) = 0;

  // Save live core and floating-point caller-save registers and
  // update the stack mask in `locations` for registers holding object
  // references.
  virtual void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations);
  // Restore live core and floating-point caller-save registers.
  virtual void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations);

  bool IsCoreRegisterSaved(int reg) const {
    return saved_core_stack_offsets_[reg] != kRegisterNotSaved;
  }

  bool IsFpuRegisterSaved(int reg) const {
    return saved_fpu_stack_offsets_[reg] != kRegisterNotSaved;
  }

  uint32_t GetStackOffsetOfCoreRegister(int reg) const {
    return saved_core_stack_offsets_[reg];
  }

  uint32_t GetStackOffsetOfFpuRegister(int reg) const {
    return saved_fpu_stack_offsets_[reg];
  }

  virtual bool IsFatal() const { return false; }

  virtual const char* GetDescription() const = 0;

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

  uint32_t GetDexPc() const {
    return instruction_ != nullptr ? instruction_->GetDexPc() : kNoDexPc;
  }

 protected:
  static constexpr size_t kMaximumNumberOfExpectedRegisters = 32;
  static constexpr uint32_t kRegisterNotSaved = -1;
  // The instruction where this slow path is happening.
  HInstruction* instruction_;
  uint32_t saved_core_stack_offsets_[kMaximumNumberOfExpectedRegisters];
  uint32_t saved_fpu_stack_offsets_[kMaximumNumberOfExpectedRegisters];

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCode);
};
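// A schematic subclass (a sketch; concrete slow paths live in the
// per-architecture backends, and the class name and body below are
// assumptions for illustration):
//
//   class NullCheckSlowPathSketch : public SlowPathCode {
//    public:
//     explicit NullCheckSlowPathSketch(HNullCheck* instruction)
//         : SlowPathCode(instruction) {}
//     void EmitNativeCode(CodeGenerator* codegen) override {
//       // Bind GetEntryLabel() here, then call the runtime entrypoint that
//       // throws NullPointerException; a fatal path never falls through.
//     }
//     bool IsFatal() const override { return true; }
//     const char* GetDescription() const override { return "NullCheckSlowPathSketch"; }
//   };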

class InvokeDexCallingConventionVisitor {
 public:
  virtual Location GetNextLocation(DataType::Type type) = 0;
  virtual Location GetReturnLocation(DataType::Type type) const = 0;
  virtual Location GetMethodLocation() const = 0;

 protected:
  InvokeDexCallingConventionVisitor() {}
  virtual ~InvokeDexCallingConventionVisitor() {}

  // The current index for core registers.
  uint32_t gp_index_ = 0u;
  // The current index for floating-point registers.
  uint32_t float_index_ = 0u;
  // The current stack index.
  uint32_t stack_index_ = 0u;

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitor);
};

class FieldAccessCallingConvention {
 public:
  virtual Location GetObjectLocation() const = 0;
  virtual Location GetFieldIndexLocation() const = 0;
  virtual Location GetReturnLocation(DataType::Type type) const = 0;
  virtual Location GetSetValueLocation(DataType::Type type, bool is_instance) const = 0;
  virtual Location GetFpuLocation(DataType::Type type) const = 0;
  virtual ~FieldAccessCallingConvention() {}

 protected:
  FieldAccessCallingConvention() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvention);
};

class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
 public:
  // Compiles the graph to executable instructions.
  void Compile(CodeAllocator* allocator);
  static std::unique_ptr<CodeGenerator> Create(HGraph* graph,
                                               const CompilerOptions& compiler_options,
                                               OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGenerator();

  // Get the graph. This is the outermost graph, never the graph of a method being inlined.
  HGraph* GetGraph() const { return graph_; }

  HBasicBlock* GetNextBlockToEmit() const;
  HBasicBlock* FirstNonEmptyBlock(HBasicBlock* block) const;
  bool GoesToNextBlock(HBasicBlock* current, HBasicBlock* next) const;

  size_t GetStackSlotOfParameter(HParameterValue* parameter) const {
    // Note that this follows the current calling convention.
    return GetFrameSize()
        + static_cast<size_t>(InstructionSetPointerSize(GetInstructionSet()))  // ArtMethod
        + parameter->GetIndex() * kVRegSize;
  }
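  // Illustrative arithmetic (an assumed example, taking kVRegSize == 4): on
  // arm64 the method pointer is 8 bytes, so with a 96-byte frame the
  // parameter at index 2 would live at stack offset 96 + 8 + 2 * 4 = 112.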

  virtual void Initialize() = 0;
  virtual void Finalize(CodeAllocator* allocator);
  virtual void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches);
  virtual bool NeedsThunkCode(const linker::LinkerPatch& patch) const;
  virtual void EmitThunkCode(const linker::LinkerPatch& patch,
                             /*out*/ ArenaVector<uint8_t>* code,
                             /*out*/ std::string* debug_name);
  virtual void GenerateFrameEntry() = 0;
  virtual void GenerateFrameExit() = 0;
  virtual void Bind(HBasicBlock* block) = 0;
  virtual void MoveConstant(Location destination, int32_t value) = 0;
  virtual void MoveLocation(Location dst, Location src, DataType::Type dst_type) = 0;
  virtual void AddLocationAsTemp(Location location, LocationSummary* locations) = 0;

  virtual Assembler* GetAssembler() = 0;
  virtual const Assembler& GetAssembler() const = 0;
  virtual size_t GetWordSize() const = 0;

  // Returns whether the target supports predicated SIMD instructions.
  virtual bool SupportsPredicatedSIMD() const { return false; }

  // Get FP register width in bytes for spilling/restoring in the slow paths.
  //
  // Note: In SIMD graphs this should return SIMD register width as all FP and SIMD registers
  // alias and live SIMD registers are forced to be spilled in full size in the slow paths.
  virtual size_t GetSlowPathFPWidth() const {
    // Default implementation.
    return GetCalleePreservedFPWidth();
  }

  // Get FP register width required to be preserved by the target ABI.
  virtual size_t GetCalleePreservedFPWidth() const = 0;

  // Get the size of the target SIMD register in bytes.
  virtual size_t GetSIMDRegisterWidth() const = 0;
  virtual uintptr_t GetAddressOf(HBasicBlock* block) = 0;
  void InitializeCodeGeneration(size_t number_of_spill_slots,
                                size_t maximum_safepoint_spill_size,
                                size_t number_of_out_slots,
                                const ArenaVector<HBasicBlock*>& block_order);
  // Backends can override this as necessary. For most, no special alignment is required.
  virtual uint32_t GetPreferredSlotsAlignment() const { return 1; }

  uint32_t GetFrameSize() const { return frame_size_; }
  void SetFrameSize(uint32_t size) { frame_size_ = size; }
  uint32_t GetCoreSpillMask() const { return core_spill_mask_; }
  uint32_t GetFpuSpillMask() const { return fpu_spill_mask_; }

  size_t GetNumberOfCoreRegisters() const { return number_of_core_registers_; }
  size_t GetNumberOfFloatingPointRegisters() const { return number_of_fpu_registers_; }
  virtual void SetupBlockedRegisters() const = 0;

  virtual void ComputeSpillMask() {
    core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
    DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
    fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  }

  static uint32_t ComputeRegisterMask(const int* registers, size_t length) {
    uint32_t mask = 0;
    for (size_t i = 0, e = length; i < e; ++i) {
      mask |= (1 << registers[i]);
    }
    return mask;
  }
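  // Illustrative (an assumed example): registers {0, 1, 4} produce the mask
  // 0b10011, i.e. bit i is set exactly when register i appears in the array.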

  virtual void DumpCoreRegister(std::ostream& stream, int reg) const = 0;
  virtual void DumpFloatingPointRegister(std::ostream& stream, int reg) const = 0;
  virtual InstructionSet GetInstructionSet() const = 0;

  const CompilerOptions& GetCompilerOptions() const { return compiler_options_; }

  // Saves the register in the stack. Returns the size taken on stack.
  virtual size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) = 0;
  // Restores the register from the stack. Returns the size taken on stack.
  virtual size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) = 0;

  virtual size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) = 0;
  virtual size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) = 0;

  virtual bool NeedsTwoRegisters(DataType::Type type) const = 0;
  // Returns whether we should split long moves in parallel moves.
  virtual bool ShouldSplitLongMoves() const { return false; }

  size_t GetNumberOfCoreCalleeSaveRegisters() const {
    return POPCOUNT(core_callee_save_mask_);
  }

  size_t GetNumberOfCoreCallerSaveRegisters() const {
    DCHECK_GE(GetNumberOfCoreRegisters(), GetNumberOfCoreCalleeSaveRegisters());
    return GetNumberOfCoreRegisters() - GetNumberOfCoreCalleeSaveRegisters();
  }

  bool IsCoreCalleeSaveRegister(int reg) const {
    return (core_callee_save_mask_ & (1 << reg)) != 0;
  }

  bool IsFloatingPointCalleeSaveRegister(int reg) const {
    return (fpu_callee_save_mask_ & (1 << reg)) != 0;
  }

  uint32_t GetSlowPathSpills(LocationSummary* locations, bool core_registers) const {
    DCHECK(locations->OnlyCallsOnSlowPath() ||
           (locations->Intrinsified() && locations->CallsOnMainAndSlowPath() &&
            !locations->HasCustomSlowPathCallingConvention()));
    uint32_t live_registers = core_registers
        ? locations->GetLiveRegisters()->GetCoreRegisters()
        : locations->GetLiveRegisters()->GetFloatingPointRegisters();
    if (locations->HasCustomSlowPathCallingConvention()) {
      // Save only the live registers that the custom calling convention wants us to save.
      uint32_t caller_saves = core_registers
          ? locations->GetCustomSlowPathCallerSaves().GetCoreRegisters()
          : locations->GetCustomSlowPathCallerSaves().GetFloatingPointRegisters();
      return live_registers & caller_saves;
    } else {
      // Default ABI, we need to spill non-callee-save live registers.
      uint32_t callee_saves = core_registers ? core_callee_save_mask_ : fpu_callee_save_mask_;
      return live_registers & ~callee_saves;
    }
  }
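  // Illustrative mask arithmetic (an assumed example): with live registers
  // 0b1111 and a callee-save mask of 0b1100, the default branch above returns
  // 0b1111 & ~0b1100 = 0b0011, i.e. only the live caller-save registers get
  // spilled; callee-saves already survive the call.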

  size_t GetNumberOfSlowPathSpills(LocationSummary* locations, bool core_registers) const {
    return POPCOUNT(GetSlowPathSpills(locations, core_registers));
  }

  size_t GetStackOffsetOfShouldDeoptimizeFlag() const {
    DCHECK(GetGraph()->HasShouldDeoptimizeFlag());
    DCHECK_GE(GetFrameSize(), FrameEntrySpillSize() + kShouldDeoptimizeFlagSize);
    return GetFrameSize() - FrameEntrySpillSize() - kShouldDeoptimizeFlagSize;
  }

  // Record native to dex mapping for a suspend point. Required by runtime.
  void RecordPcInfo(HInstruction* instruction,
                    uint32_t dex_pc,
                    uint32_t native_pc,
                    SlowPathCode* slow_path = nullptr,
                    bool native_debug_info = false);

  // Record native to dex mapping for a suspend point.
  // The native_pc is used from Assembler::CodePosition.
  //
  // Note: As Assembler::CodePosition is target dependent, it does not guarantee the exact
  // native_pc for the instruction. If the exact native_pc is required it must be provided
  // explicitly.
  void RecordPcInfo(HInstruction* instruction,
                    uint32_t dex_pc,
                    SlowPathCode* slow_path = nullptr,
                    bool native_debug_info = false);

  // Check whether we have already recorded mapping at this PC.
  bool HasStackMapAtCurrentPc();

  // Record extra stack maps if we support native debugging.
  //
  // ARM specific behaviour: The recorded native PC might be a branch over pools to instructions
  // corresponding to the dex PC.
  void MaybeRecordNativeDebugInfo(HInstruction* instruction,
                                  uint32_t dex_pc,
                                  SlowPathCode* slow_path = nullptr);

  bool CanMoveNullCheckToUser(HNullCheck* null_check);
  virtual void MaybeRecordImplicitNullCheck(HInstruction* instruction);
  LocationSummary* CreateThrowingSlowPathLocations(
      HInstruction* instruction, RegisterSet caller_saves = RegisterSet::Empty());
  void GenerateNullCheck(HNullCheck* null_check);
  virtual void GenerateImplicitNullCheck(HNullCheck* null_check) = 0;
  virtual void GenerateExplicitNullCheck(HNullCheck* null_check) = 0;

  // Records a stack map which the runtime might use to set catch phi values
  // during exception delivery.
  // TODO: Replace with a catch-entering instruction that records the environment.
  void RecordCatchBlockInfo();

  // Get the ScopedArenaAllocator used for codegen memory allocation.
  ScopedArenaAllocator* GetScopedAllocator();

  void AddSlowPath(SlowPathCode* slow_path);

  ScopedArenaVector<uint8_t> BuildStackMaps(const dex::CodeItem* code_item_for_osr_check);
  size_t GetNumberOfJitRoots() const;

  // Fills the `roots` vector with the roots collected during code generation.
  // Also emits the JIT root patches.
  void EmitJitRoots(uint8_t* code,
                    const uint8_t* roots_data,
                    /*out*/std::vector<Handle<mirror::Object>>* roots)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsLeafMethod() const {
    return is_leaf_;
  }

  void MarkNotLeaf() {
    is_leaf_ = false;
    requires_current_method_ = true;
  }

  bool NeedsSuspendCheckEntry() const {
    return needs_suspend_check_entry_;
  }

  void MarkNeedsSuspendCheckEntry() {
    needs_suspend_check_entry_ = true;
  }

  void SetRequiresCurrentMethod() {
    requires_current_method_ = true;
  }

  bool RequiresCurrentMethod() const {
    return requires_current_method_;
  }

  // Clears the spill slots taken by loop phis in the `LocationSummary` of the
  // suspend check. This is called when the code generator generates code
  // for the suspend check at the back edge (instead of where the suspend check
  // is, which is the loop entry). At this point, the spill slots for the phis
  // have not been written to.
  void ClearSpillSlotsFromLoopPhisInStackMap(HSuspendCheck* suspend_check,
                                             HParallelMove* spills) const;

  bool* GetBlockedCoreRegisters() const { return blocked_core_registers_; }
  bool* GetBlockedFloatingPointRegisters() const { return blocked_fpu_registers_; }

  bool IsBlockedCoreRegister(size_t i) { return blocked_core_registers_[i]; }
  bool IsBlockedFloatingPointRegister(size_t i) { return blocked_fpu_registers_[i]; }

  // Helper that returns the offset of the array's length field.
  // Note: Besides the normal arrays, we also use the HArrayLength for
  // accessing the String's `count` field in String intrinsics.
  static uint32_t GetArrayLengthOffset(HArrayLength* array_length);

  // Helper that returns the offset of the array's data.
  // Note: Besides the normal arrays, we also use the HArrayGet for
  // accessing the String's `value` field in String intrinsics.
  static uint32_t GetArrayDataOffset(HArrayGet* array_get);

  void EmitParallelMoves(Location from1,
                         Location to1,
                         DataType::Type type1,
                         Location from2,
                         Location to2,
                         DataType::Type type2);

  static bool InstanceOfNeedsReadBarrier(HInstanceOf* instance_of) {
    // Used only for kExactCheck, kAbstractClassCheck, kClassHierarchyCheck and kArrayObjectCheck.
    DCHECK(instance_of->GetTypeCheckKind() == TypeCheckKind::kExactCheck ||
           instance_of->GetTypeCheckKind() == TypeCheckKind::kAbstractClassCheck ||
           instance_of->GetTypeCheckKind() == TypeCheckKind::kClassHierarchyCheck ||
           instance_of->GetTypeCheckKind() == TypeCheckKind::kArrayObjectCheck)
        << instance_of->GetTypeCheckKind();
    // If the target class is in the boot image, it's non-moveable and it doesn't matter
    // if we compare it with a from-space or to-space reference, the result is the same.
    // It's OK to traverse a class hierarchy jumping between from-space and to-space.
    return kEmitCompilerReadBarrier && !instance_of->GetTargetClass()->IsInBootImage();
  }

  static ReadBarrierOption ReadBarrierOptionForInstanceOf(HInstanceOf* instance_of) {
    return InstanceOfNeedsReadBarrier(instance_of) ? kWithReadBarrier : kWithoutReadBarrier;
  }

  static bool IsTypeCheckSlowPathFatal(HCheckCast* check_cast) {
    switch (check_cast->GetTypeCheckKind()) {
      case TypeCheckKind::kExactCheck:
      case TypeCheckKind::kAbstractClassCheck:
      case TypeCheckKind::kClassHierarchyCheck:
      case TypeCheckKind::kArrayObjectCheck:
      case TypeCheckKind::kInterfaceCheck: {
        bool needs_read_barrier =
            kEmitCompilerReadBarrier && !check_cast->GetTargetClass()->IsInBootImage();
        // We do not emit read barriers for HCheckCast, so we can get false negatives
        // and the slow path shall re-check and simply return if the cast is actually OK.
        return !needs_read_barrier;
      }
      case TypeCheckKind::kArrayCheck:
      case TypeCheckKind::kUnresolvedCheck:
        return false;
      case TypeCheckKind::kBitstringCheck:
        return true;
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  static LocationSummary::CallKind GetCheckCastCallKind(HCheckCast* check_cast) {
    return (IsTypeCheckSlowPathFatal(check_cast) && !check_cast->CanThrowIntoCatchBlock())
        ? LocationSummary::kNoCall  // In fact, call on a fatal (non-returning) slow path.
        : LocationSummary::kCallOnSlowPath;
  }

  static bool StoreNeedsWriteBarrier(DataType::Type type, HInstruction* value) {
    // Check that null value is not represented as an integer constant.
    DCHECK_IMPLIES(type == DataType::Type::kReference, !value->IsIntConstant());
    return type == DataType::Type::kReference && !value->IsNullConstant();
  }
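  // Illustrative: a reference store like `obj.field = str` needs a card-table
  // write barrier, while `obj.field = null` or a primitive store such as
  // `array[0] = 42` does not.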

  // Performs checks pertaining to an InvokeRuntime call.
  void ValidateInvokeRuntime(QuickEntrypointEnum entrypoint,
                             HInstruction* instruction,
                             SlowPathCode* slow_path);

  // Performs checks pertaining to an InvokeRuntimeWithoutRecordingPcInfo call.
  static void ValidateInvokeRuntimeWithoutRecordingPcInfo(HInstruction* instruction,
                                                          SlowPathCode* slow_path);

  void AddAllocatedRegister(Location location) {
    allocated_registers_.Add(location);
  }

  bool HasAllocatedRegister(bool is_core, int reg) const {
    return is_core
        ? allocated_registers_.ContainsCoreRegister(reg)
        : allocated_registers_.ContainsFloatingPointRegister(reg);
  }

  void AllocateLocations(HInstruction* instruction);

  // Tells whether the stack frame of the compiled method is
  // considered "empty", that is either actually having a size of zero,
  // or just containing the saved return address register.
  bool HasEmptyFrame() const {
    return GetFrameSize() == (CallPushesPC() ? GetWordSize() : 0);
  }
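  // Illustrative: on x86/x86-64 the call instruction pushes the return PC, so
  // an "empty" frame still has size GetWordSize(); on ARM targets the return
  // address lives in a register and an empty frame has size zero.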

  static int8_t GetInt8ValueOf(HConstant* constant) {
    DCHECK(constant->IsIntConstant());
    return constant->AsIntConstant()->GetValue();
  }

  static int16_t GetInt16ValueOf(HConstant* constant) {
    DCHECK(constant->IsIntConstant());
    return constant->AsIntConstant()->GetValue();
  }

  static int32_t GetInt32ValueOf(HConstant* constant) {
    if (constant->IsIntConstant()) {
      return constant->AsIntConstant()->GetValue();
    } else if (constant->IsNullConstant()) {
      return 0;
    } else {
      DCHECK(constant->IsFloatConstant());
      return bit_cast<int32_t, float>(constant->AsFloatConstant()->GetValue());
    }
  }

  static int64_t GetInt64ValueOf(HConstant* constant) {
    if (constant->IsIntConstant()) {
      return constant->AsIntConstant()->GetValue();
    } else if (constant->IsNullConstant()) {
      return 0;
    } else if (constant->IsFloatConstant()) {
      return bit_cast<int32_t, float>(constant->AsFloatConstant()->GetValue());
    } else if (constant->IsLongConstant()) {
      return constant->AsLongConstant()->GetValue();
    } else {
      DCHECK(constant->IsDoubleConstant());
      return bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
    }
  }
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000570 size_t GetFirstRegisterSlotInSlowPath() const {
571 return first_register_slot_in_slow_path_;
572 }
573
574 uint32_t FrameEntrySpillSize() const {
575 return GetFpuSpillSize() + GetCoreSpillSize();
576 }
577
Roland Levillainec525fc2015-04-28 15:50:20 +0100578 virtual ParallelMoveResolver* GetMoveResolver() = 0;
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000579
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +0100580 static void CreateCommonInvokeLocationSummary(
581 HInvoke* invoke, InvokeDexCallingConventionVisitor* visitor);
582
Vladimir Marko86c87522020-05-11 16:55:55 +0100583 template <typename CriticalNativeCallingConventionVisitor,
584 size_t kNativeStackAlignment,
585 size_t GetCriticalNativeDirectCallFrameSize(const char* shorty, uint32_t shorty_len)>
Vladimir Markodec78172020-06-19 15:31:23 +0100586 size_t PrepareCriticalNativeCall(HInvokeStaticOrDirect* invoke) {
Vladimir Marko86c87522020-05-11 16:55:55 +0100587 DCHECK(!invoke->GetLocations()->Intrinsified());
588 CriticalNativeCallingConventionVisitor calling_convention_visitor(
589 /*for_register_allocation=*/ false);
Vladimir Markodec78172020-06-19 15:31:23 +0100590 HParallelMove parallel_move(GetGraph()->GetAllocator());
591 PrepareCriticalNativeArgumentMoves(invoke, &calling_convention_visitor, &parallel_move);
Vladimir Marko86c87522020-05-11 16:55:55 +0100592 size_t out_frame_size =
593 RoundUp(calling_convention_visitor.GetStackOffset(), kNativeStackAlignment);
594 if (kIsDebugBuild) {
595 uint32_t shorty_len;
596 const char* shorty = GetCriticalNativeShorty(invoke, &shorty_len);
597 DCHECK_EQ(GetCriticalNativeDirectCallFrameSize(shorty, shorty_len), out_frame_size);
598 }
599 if (out_frame_size != 0u) {
Vladimir Markodec78172020-06-19 15:31:23 +0100600 FinishCriticalNativeFrameSetup(out_frame_size, &parallel_move);
Vladimir Marko86c87522020-05-11 16:55:55 +0100601 }
602 return out_frame_size;
603 }
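  // A sketch of how a backend might instantiate the template above (the
  // visitor and frame-size helper are per-architecture; the names here are
  // assumptions for illustration):
  //
  //   size_t out_frame_size =
  //       PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86,
  //                                 kNativeStackAlignment,
  //                                 GetCriticalNativeDirectCallFrameSize>(invoke);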

  void GenerateInvokeStaticOrDirectRuntimeCall(
      HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path);

  void GenerateInvokeUnresolvedRuntimeCall(HInvokeUnresolved* invoke);

  void GenerateInvokePolymorphicCall(HInvokePolymorphic* invoke, SlowPathCode* slow_path = nullptr);

  void GenerateInvokeCustomCall(HInvokeCustom* invoke);

  void CreateStringBuilderAppendLocations(HStringBuilderAppend* instruction, Location out);

  void CreateUnresolvedFieldLocationSummary(
      HInstruction* field_access,
      DataType::Type field_type,
      const FieldAccessCallingConvention& calling_convention);

  void GenerateUnresolvedFieldAccess(
      HInstruction* field_access,
      DataType::Type field_type,
      uint32_t field_index,
      uint32_t dex_pc,
      const FieldAccessCallingConvention& calling_convention);

  static void CreateLoadClassRuntimeCallLocationSummary(HLoadClass* cls,
                                                        Location runtime_type_index_location,
                                                        Location runtime_return_location);
  void GenerateLoadClassRuntimeCall(HLoadClass* cls);

  static void CreateLoadMethodHandleRuntimeCallLocationSummary(HLoadMethodHandle* method_handle,
                                                               Location runtime_handle_index_location,
                                                               Location runtime_return_location);
  void GenerateLoadMethodHandleRuntimeCall(HLoadMethodHandle* method_handle);

  static void CreateLoadMethodTypeRuntimeCallLocationSummary(HLoadMethodType* method_type,
                                                             Location runtime_type_index_location,
                                                             Location runtime_return_location);
  void GenerateLoadMethodTypeRuntimeCall(HLoadMethodType* method_type);

  static uint32_t GetBootImageOffset(ObjPtr<mirror::Object> object)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static uint32_t GetBootImageOffset(HLoadClass* load_class);
  static uint32_t GetBootImageOffset(HLoadString* load_string);
  static uint32_t GetBootImageOffset(HInvoke* invoke);
  static uint32_t GetBootImageOffset(ClassRoot class_root);
  static uint32_t GetBootImageOffsetOfIntrinsicDeclaringClass(HInvoke* invoke);

  static void CreateSystemArrayCopyLocationSummary(HInvoke* invoke);

  void SetDisassemblyInformation(DisassemblyInformation* info) { disasm_info_ = info; }
  DisassemblyInformation* GetDisassemblyInformation() const { return disasm_info_; }

  virtual void InvokeRuntime(QuickEntrypointEnum entrypoint,
                             HInstruction* instruction,
                             uint32_t dex_pc,
                             SlowPathCode* slow_path = nullptr) = 0;

  // Check if the desired_string_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  virtual HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) = 0;

  // Check if the desired_class_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  virtual HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) = 0;

  static LocationSummary::CallKind GetLoadStringCallKind(HLoadString* load) {
    switch (load->GetLoadKind()) {
      case HLoadString::LoadKind::kBssEntry:
        DCHECK(load->NeedsEnvironment());
        return LocationSummary::kCallOnSlowPath;
      case HLoadString::LoadKind::kRuntimeCall:
        DCHECK(load->NeedsEnvironment());
        return LocationSummary::kCallOnMainOnly;
      case HLoadString::LoadKind::kJitTableAddress:
        DCHECK(!load->NeedsEnvironment());
        return kEmitCompilerReadBarrier
            ? LocationSummary::kCallOnSlowPath
            : LocationSummary::kNoCall;
      default:
        DCHECK(!load->NeedsEnvironment());
        return LocationSummary::kNoCall;
    }
  }

  // Check if the desired_dispatch_info is supported. If it is, return it,
  // otherwise return a fall-back info that should be used instead.
  virtual HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      ArtMethod* method) = 0;

  // Generate a call to a static or direct method.
  virtual void GenerateStaticOrDirectCall(
      HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) = 0;
  // Generate a call to a virtual method.
  virtual void GenerateVirtualCall(
      HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) = 0;

  // Copy the result of a call into the given target.
  virtual void MoveFromReturnRegister(Location trg, DataType::Type type) = 0;

  virtual void IncreaseFrame(size_t adjustment) = 0;
  virtual void DecreaseFrame(size_t adjustment) = 0;

  virtual void GenerateNop() = 0;

  static QuickEntrypointEnum GetArrayAllocationEntrypoint(HNewArray* new_array);
  static ScaleFactor ScaleFactorForType(DataType::Type type);

 protected:
  // Patch info used for recording locations of required linker patches and their targets,
  // i.e. target method, string, type or code identified by their dex file and index,
  // or .data.bimg.rel.ro entries identified by the boot image offset.
  template <typename LabelType>
  struct PatchInfo {
    PatchInfo(const DexFile* dex_file, uint32_t off_or_idx)
        : target_dex_file(dex_file), offset_or_index(off_or_idx), label() { }

    // Target dex file or null for .data.bimg.rel.ro patches.
    const DexFile* target_dex_file;
    // Either the boot image offset (to write to .data.bimg.rel.ro) or string/type/method index.
    uint32_t offset_or_index;
    // Label for the instruction to patch.
    LabelType label;
  };

  CodeGenerator(HGraph* graph,
                size_t number_of_core_registers,
                size_t number_of_fpu_registers,
                size_t number_of_register_pairs,
                uint32_t core_callee_save_mask,
                uint32_t fpu_callee_save_mask,
                const CompilerOptions& compiler_options,
                OptimizingCompilerStats* stats);

  virtual HGraphVisitor* GetLocationBuilder() = 0;
  virtual HGraphVisitor* GetInstructionVisitor() = 0;

  // Returns the location of the first spilled entry for floating point registers,
  // relative to the stack pointer.
  uint32_t GetFpuSpillStart() const {
    return GetFrameSize() - FrameEntrySpillSize();
  }

  uint32_t GetFpuSpillSize() const {
    return POPCOUNT(fpu_spill_mask_) * GetCalleePreservedFPWidth();
  }

  uint32_t GetCoreSpillSize() const {
    return POPCOUNT(core_spill_mask_) * GetWordSize();
  }

  virtual bool HasAllocatedCalleeSaveRegisters() const {
    // We check the core registers against 1 because it always comprises the return PC.
    return (POPCOUNT(allocated_registers_.GetCoreRegisters() & core_callee_save_mask_) != 1)
        || (POPCOUNT(allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_) != 0);
  }

  bool CallPushesPC() const {
    InstructionSet instruction_set = GetInstructionSet();
    return instruction_set == InstructionSet::kX86 || instruction_set == InstructionSet::kX86_64;
  }

  // Arm64 has its own type for a label, so we need to templatize these methods
  // to share the logic.

  template <typename LabelType>
  LabelType* CommonInitializeLabels() {
    // We use raw array allocations instead of ArenaVector<> because Labels are
    // non-constructible and non-movable and as such cannot be held in a vector.
    size_t size = GetGraph()->GetBlocks().size();
    LabelType* labels =
        GetGraph()->GetAllocator()->AllocArray<LabelType>(size, kArenaAllocCodeGenerator);
    for (size_t i = 0; i != size; ++i) {
      new(labels + i) LabelType();
    }
    return labels;
  }

  template <typename LabelType>
  LabelType* CommonGetLabelOf(LabelType* raw_pointer_to_labels_array, HBasicBlock* block) const {
    block = FirstNonEmptyBlock(block);
    return raw_pointer_to_labels_array + block->GetBlockId();
  }

  SlowPathCode* GetCurrentSlowPath() {
    return current_slow_path_;
  }

  StackMapStream* GetStackMapStream();

  void ReserveJitStringRoot(StringReference string_reference, Handle<mirror::String> string);
  uint64_t GetJitStringRootIndex(StringReference string_reference);
  void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass);
  uint64_t GetJitClassRootIndex(TypeReference type_reference);

  // Emit the patches associated with JIT roots. Only applies to JIT compiled code.
  virtual void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data);

  // Frame size required for this method.
  uint32_t frame_size_;
  uint32_t core_spill_mask_;
  uint32_t fpu_spill_mask_;
  uint32_t first_register_slot_in_slow_path_;

  // Registers that were allocated during linear scan.
  RegisterSet allocated_registers_;

  // Arrays used when doing register allocation to know which
  // registers we can allocate. `SetupBlockedRegisters` updates the
  // arrays.
  bool* const blocked_core_registers_;
  bool* const blocked_fpu_registers_;
  size_t number_of_core_registers_;
  size_t number_of_fpu_registers_;
  size_t number_of_register_pairs_;
  const uint32_t core_callee_save_mask_;
  const uint32_t fpu_callee_save_mask_;

  // The order to use for code generation.
  const ArenaVector<HBasicBlock*>* block_order_;

  DisassemblyInformation* disasm_info_;

 private:
  class CodeGenerationData;

  void InitializeCodeGenerationData();
  size_t GetStackOffsetOfSavedRegister(size_t index);
  void GenerateSlowPaths();
  void BlockIfInRegister(Location location, bool is_out = false) const;
  void EmitEnvironment(HEnvironment* environment,
                       SlowPathCode* slow_path,
                       bool needs_vreg_info = true);
  void EmitVRegInfo(HEnvironment* environment, SlowPathCode* slow_path);

  static void PrepareCriticalNativeArgumentMoves(
      HInvokeStaticOrDirect* invoke,
      /*inout*/InvokeDexCallingConventionVisitor* visitor,
      /*out*/HParallelMove* parallel_move);

  void FinishCriticalNativeFrameSetup(size_t out_frame_size, /*inout*/HParallelMove* parallel_move);

  static const char* GetCriticalNativeShorty(HInvokeStaticOrDirect* invoke, uint32_t* shorty_len);

  OptimizingCompilerStats* stats_;

  HGraph* const graph_;
  const CompilerOptions& compiler_options_;

  // The current slow-path that we're generating code for.
  SlowPathCode* current_slow_path_;

  // The current block index in `block_order_` of the block
  // we are generating code for.
  size_t current_block_index_;

  // Whether the method is a leaf method.
  bool is_leaf_;

  // Whether the method has to emit a SuspendCheck at entry.
  bool needs_suspend_check_entry_;

  // Whether an instruction in the graph accesses the current method.
  // TODO: Rename: this actually indicates that some instruction in the method
  // needs the environment including a valid stack frame.
  bool requires_current_method_;

  // The CodeGenerationData contains a ScopedArenaAllocator intended for reusing the
  // ArenaStack memory allocated in previous passes instead of adding to the memory
  // held by the ArenaAllocator. This ScopedArenaAllocator is created in
  // CodeGenerator::Compile() and remains alive until the CodeGenerator is destroyed.
  std::unique_ptr<CodeGenerationData> code_generation_data_;

  friend class OptimizingCFITest;
  ART_FRIEND_TEST(CodegenTest, ARM64FrameSizeSIMD);
  ART_FRIEND_TEST(CodegenTest, ARM64FrameSizeNoSIMD);

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};

template <typename C, typename F>
class CallingConvention {
 public:
  CallingConvention(const C* registers,
                    size_t number_of_registers,
                    const F* fpu_registers,
                    size_t number_of_fpu_registers,
                    PointerSize pointer_size)
      : registers_(registers),
        number_of_registers_(number_of_registers),
        fpu_registers_(fpu_registers),
        number_of_fpu_registers_(number_of_fpu_registers),
        pointer_size_(pointer_size) {}

  size_t GetNumberOfRegisters() const { return number_of_registers_; }
  size_t GetNumberOfFpuRegisters() const { return number_of_fpu_registers_; }

  C GetRegisterAt(size_t index) const {
    DCHECK_LT(index, number_of_registers_);
    return registers_[index];
  }

  F GetFpuRegisterAt(size_t index) const {
    DCHECK_LT(index, number_of_fpu_registers_);
    return fpu_registers_[index];
  }

  size_t GetStackOffsetOf(size_t index) const {
    // We still reserve the space for parameters passed by registers.
    // Add space for the method pointer.
    return static_cast<size_t>(pointer_size_) + index * kVRegSize;
  }
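  // Illustrative arithmetic (an assumed example, taking kVRegSize == 4): with
  // an 8-byte method pointer, the argument at index 2 sits 8 + 2 * 4 = 16
  // bytes from the stack pointer.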

 private:
  const C* registers_;
  const size_t number_of_registers_;
  const F* fpu_registers_;
  const size_t number_of_fpu_registers_;
  const PointerSize pointer_size_;

  DISALLOW_COPY_AND_ASSIGN(CallingConvention);
};

/**
 * A templated class SlowPathGenerator with a templated method NewSlowPath()
 * that can be used by any code generator to share equivalent slow-paths with
 * the objective of reducing generated code size.
 *
 * InstructionType:  instruction that requires SlowPathCodeType
 * SlowPathCodeType: subclass of SlowPathCode, with constructor SlowPathCodeType(InstructionType*)
 */
template <typename InstructionType>
class SlowPathGenerator {
  static_assert(std::is_base_of<HInstruction, InstructionType>::value,
                "InstructionType is not a subclass of art::HInstruction");

 public:
  SlowPathGenerator(HGraph* graph, CodeGenerator* codegen)
      : graph_(graph),
        codegen_(codegen),
        slow_path_map_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocSlowPaths)) {}

  // Creates and adds a new slow-path, if needed, or returns existing one otherwise.
  // Templating the method (rather than the whole class) on the slow-path type enables
  // keeping this code at a generic, non architecture-specific place.
  //
  // NOTE: This approach assumes each InstructionType only generates one SlowPathCodeType.
  //       To relax this requirement, we would need some RTTI on the stored slow-paths,
  //       or template the class as a whole on SlowPathType.
  template <typename SlowPathCodeType>
  SlowPathCodeType* NewSlowPath(InstructionType* instruction) {
    static_assert(std::is_base_of<SlowPathCode, SlowPathCodeType>::value,
                  "SlowPathCodeType is not a subclass of art::SlowPathCode");
    static_assert(std::is_constructible<SlowPathCodeType, InstructionType*>::value,
                  "SlowPathCodeType is not constructible from InstructionType*");
    // Iterate over potential candidates for sharing. Currently, only same-typed
    // slow-paths with exactly the same dex-pc are viable candidates.
    // TODO: pass dex-pc/slow-path-type to run-time to allow even more sharing?
    const uint32_t dex_pc = instruction->GetDexPc();
    auto iter = slow_path_map_.find(dex_pc);
    if (iter != slow_path_map_.end()) {
      const ArenaVector<std::pair<InstructionType*, SlowPathCode*>>& candidates = iter->second;
      for (const auto& it : candidates) {
        InstructionType* other_instruction = it.first;
        SlowPathCodeType* other_slow_path = down_cast<SlowPathCodeType*>(it.second);
        // Determine if the instructions allow for slow-path sharing.
        if (HaveSameLiveRegisters(instruction, other_instruction) &&
            HaveSameStackMap(instruction, other_instruction)) {
          // Can share: reuse existing one.
          return other_slow_path;
        }
      }
    } else {
      // First time this dex-pc is seen.
      iter = slow_path_map_.Put(dex_pc,
                                {{}, {graph_->GetAllocator()->Adapter(kArenaAllocSlowPaths)}});
    }
    // Cannot share: create and add new slow-path for this particular dex-pc.
    SlowPathCodeType* slow_path =
        new (codegen_->GetScopedAllocator()) SlowPathCodeType(instruction);
    iter->second.emplace_back(std::make_pair(instruction, slow_path));
    codegen_->AddSlowPath(slow_path);
    return slow_path;
  }
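  // Illustrative use from a backend's instruction visitor (a sketch; the
  // slow-path class name and branch syntax are per-architecture and assumed
  // here):
  //
  //   SlowPathCode* slow_path =
  //       deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deopt);
  //   __ B(ne, slow_path->GetEntryLabel());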

 private:
  // Tests if both instructions have same set of live physical registers. This ensures
  // the slow-path has exactly the same preamble on saving these registers to stack.
  bool HaveSameLiveRegisters(const InstructionType* i1, const InstructionType* i2) const {
    const uint32_t core_spill = ~codegen_->GetCoreSpillMask();
    const uint32_t fpu_spill = ~codegen_->GetFpuSpillMask();
    RegisterSet* live1 = i1->GetLocations()->GetLiveRegisters();
    RegisterSet* live2 = i2->GetLocations()->GetLiveRegisters();
    return (((live1->GetCoreRegisters() & core_spill) ==
             (live2->GetCoreRegisters() & core_spill)) &&
            ((live1->GetFloatingPointRegisters() & fpu_spill) ==
             (live2->GetFloatingPointRegisters() & fpu_spill)));
  }

  // Tests if both instructions have the same stack map. This ensures the interpreter
  // will find exactly the same dex-registers at the same entries.
  bool HaveSameStackMap(const InstructionType* i1, const InstructionType* i2) const {
    DCHECK(i1->HasEnvironment());
    DCHECK(i2->HasEnvironment());
    // We conservatively test if the two instructions find exactly the same instructions
    // and location in each dex-register. This guarantees they will have the same stack map.
    HEnvironment* e1 = i1->GetEnvironment();
    HEnvironment* e2 = i2->GetEnvironment();
    if (e1->GetParent() != e2->GetParent() || e1->Size() != e2->Size()) {
      return false;
    }
    for (size_t i = 0, sz = e1->Size(); i < sz; ++i) {
      if (e1->GetInstructionAt(i) != e2->GetInstructionAt(i) ||
          !e1->GetLocationAt(i).Equals(e2->GetLocationAt(i))) {
        return false;
      }
    }
    return true;
  }

  HGraph* const graph_;
  CodeGenerator* const codegen_;

  // Map from dex-pc to vector of already existing instruction/slow-path pairs.
  ArenaSafeMap<uint32_t, ArenaVector<std::pair<InstructionType*, SlowPathCode*>>> slow_path_map_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathGenerator);
};

class InstructionCodeGenerator : public HGraphVisitor {
 public:
  InstructionCodeGenerator(HGraph* graph, CodeGenerator* codegen)
      : HGraphVisitor(graph),
        deopt_slow_paths_(graph, codegen) {}

 protected:
  // Add slow-path generator for each instruction/slow-path combination that desires sharing.
  // TODO: under the current regime, only deopt sharing makes sense; extend later.
  SlowPathGenerator<HDeoptimize> deopt_slow_paths_;
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_H_