/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map.h"

#include <iomanip>
#include <stdint.h>

#include "art_method.h"
#include "base/indenter.h"
#include "base/stats.h"
#include "oat_quick_method_header.h"
#include "scoped_thread_state_change-inl.h"

namespace art {

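// Encoded layout (sketch; see bit_table.h and bit_memory_region.h for the
// authoritative format): a CodeInfo blob starts with an interleaved-varint
// header (code size, packed frame size, spill masks, number of dex registers,
// and per-table presence/dedup flags), followed by each present bit-table in a
// fixed order. A table that was deduped against an earlier blob is replaced by
// a varint back-reference to the previous encoding instead of the table body.
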
// The callback is used to inform the caller about memory bounds of the bit-tables.
template<typename DecodeCallback>
CodeInfo::CodeInfo(const uint8_t* data, size_t* num_read_bits, DecodeCallback callback) {
  BitMemoryReader reader(data);
  std::array<uint32_t, kNumHeaders> header = reader.ReadInterleavedVarints<kNumHeaders>();
  ForEachHeaderField([this, &header](size_t i, auto member_pointer) {
    this->*member_pointer = header[i];
  });
  ForEachBitTableField([this, &reader, &callback](size_t i, auto member_pointer) {
    auto& table = this->*member_pointer;
    if (LIKELY(HasBitTable(i))) {
      if (UNLIKELY(IsBitTableDeduped(i))) {
        ssize_t bit_offset = reader.NumberOfReadBits() - reader.ReadVarint();
        BitMemoryReader reader2(reader.data(), bit_offset);  // The offset is negative.
        table.Decode(reader2);
        callback(i, &table, reader2.GetReadRegion());
      } else {
        ssize_t bit_offset = reader.NumberOfReadBits();
        table.Decode(reader);
        callback(i, &table, reader.GetReadRegion().Subregion(bit_offset));
      }
    }
  });
  if (num_read_bits != nullptr) {
    *num_read_bits = reader.NumberOfReadBits();
  }
}

CodeInfo::CodeInfo(const uint8_t* data, size_t* num_read_bits)
    : CodeInfo(data, num_read_bits, [](size_t, auto*, BitMemoryRegion){}) {}

CodeInfo::CodeInfo(const OatQuickMethodHeader* header)
    : CodeInfo(header->GetOptimizedCodeInfoPtr()) {}

QuickMethodFrameInfo CodeInfo::DecodeFrameInfo(const uint8_t* data) {
  CodeInfo code_info(data);
  return QuickMethodFrameInfo(code_info.packed_frame_size_ * kStackAlignment,
                              code_info.core_spill_mask_,
                              code_info.fp_spill_mask_);
}

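// Usage sketch (illustrative; assumes a stack-walk caller that already holds
// the OatQuickMethodHeader, and that QuickMethodFrameInfo::FrameSizeInBytes()
// is the accessor for the decoded frame size):
//   QuickMethodFrameInfo frame_info =
//       CodeInfo::DecodeFrameInfo(header->GetOptimizedCodeInfoPtr());
//   uintptr_t caller_sp = sp + frame_info.FrameSizeInBytes();
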
CodeInfo CodeInfo::DecodeGcMasksOnly(const OatQuickMethodHeader* header) {
  CodeInfo code_info(header->GetOptimizedCodeInfoPtr());
  CodeInfo copy;  // Copy to dead-code-eliminate all fields that we do not need.
  copy.stack_maps_ = code_info.stack_maps_;
  copy.register_masks_ = code_info.register_masks_;
  copy.stack_masks_ = code_info.stack_masks_;
  return copy;
}

CodeInfo CodeInfo::DecodeInlineInfoOnly(const OatQuickMethodHeader* header) {
  CodeInfo code_info(header->GetOptimizedCodeInfoPtr());
  CodeInfo copy;  // Copy to dead-code-eliminate all fields that we do not need.
  copy.number_of_dex_registers_ = code_info.number_of_dex_registers_;
  copy.stack_maps_ = code_info.stack_maps_;
  copy.inline_infos_ = code_info.inline_infos_;
  copy.method_infos_ = code_info.method_infos_;
  return copy;
}

uint32_t CodeInfo::DecodeCodeSize(const OatQuickMethodHeader* header) {
  return CodeInfo(header->GetOptimizedCodeInfoPtr()).code_size_;
}

size_t CodeInfo::Deduper::Dedupe(const uint8_t* code_info_data) {
  writer_.ByteAlign();
  size_t deduped_offset = writer_.NumberOfWrittenBits() / kBitsPerByte;

  // The back-reference offset takes space, so deduping is not worth it for tiny tables.
  constexpr size_t kMinDedupSize = 32;  // Assume 32-bit offset on average.

  // Read the existing code info and find (and keep) the dedup-map iterator for each table.
  // The iterator stores the BitMemoryRegion and bit_offset of the previous identical BitTable.
  std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less>::iterator it[kNumBitTables];
  CodeInfo code_info(code_info_data, nullptr, [&](size_t i, auto*, BitMemoryRegion region) {
    it[i] = dedupe_map_.emplace(region, /*bit_offset=*/0).first;
    if (it[i]->second != 0 && region.size_in_bits() > kMinDedupSize) {  // Seen before and large?
      code_info.SetBitTableDeduped(i);  // Mark as deduped before we write the header.
    }
  });

  // Write the code info back, but replace deduped tables with relative offsets.
  std::array<uint32_t, kNumHeaders> header;
  ForEachHeaderField([&code_info, &header](size_t i, auto member_pointer) {
    header[i] = code_info.*member_pointer;
  });
  writer_.WriteInterleavedVarints(header);
  ForEachBitTableField([this, &code_info, &it](size_t i, auto) {
    if (code_info.HasBitTable(i)) {
      uint32_t& bit_offset = it[i]->second;
      if (code_info.IsBitTableDeduped(i)) {
        DCHECK_NE(bit_offset, 0u);
        writer_.WriteVarint(writer_.NumberOfWrittenBits() - bit_offset);
      } else {
        bit_offset = writer_.NumberOfWrittenBits();  // Store offset in dedup map.
        writer_.WriteRegion(it[i]->first);
      }
    }
  });

  if (kIsDebugBuild) {
    CodeInfo old_code_info(code_info_data);
    CodeInfo new_code_info(writer_.data() + deduped_offset);
    ForEachHeaderField([&old_code_info, &new_code_info](size_t, auto member_pointer) {
      if (member_pointer != &CodeInfo::bit_table_flags_) {  // Expected to differ.
        DCHECK_EQ(old_code_info.*member_pointer, new_code_info.*member_pointer);
      }
    });
    ForEachBitTableField([&old_code_info, &new_code_info](size_t i, auto member_pointer) {
      DCHECK_EQ(old_code_info.HasBitTable(i), new_code_info.HasBitTable(i));
      DCHECK((old_code_info.*member_pointer).Equals(new_code_info.*member_pointer));
    });
  }

  return deduped_offset;
}

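// Usage sketch (assumptions: the Deduper is constructed over the output byte
// vector, as done by the oat writer; one instance must be reused across
// methods so that dedupe_map_ can match identical tables between them):
//   std::vector<uint8_t> out;
//   CodeInfo::Deduper deduper(&out);
//   size_t offset_a = deduper.Dedupe(code_info_a);  // First copy written in full.
//   size_t offset_b = deduper.Dedupe(code_info_b);  // Equal tables become back-references.
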
StackMap CodeInfo::GetStackMapForNativePcOffset(uintptr_t pc, InstructionSet isa) const {
  uint32_t packed_pc = StackMap::PackNativePc(pc, isa);
  // Binary search. All catch stack maps are stored separately at the end.
  auto it = std::partition_point(
      stack_maps_.begin(),
      stack_maps_.end(),
      [packed_pc](const StackMap& sm) {
        return sm.GetPackedNativePc() < packed_pc && sm.GetKind() != StackMap::Kind::Catch;
      });
  // Start at the lower bound and iterate over all stack maps with the given native pc.
  for (; it != stack_maps_.end() && (*it).GetNativePcOffset(isa) == pc; ++it) {
    StackMap::Kind kind = static_cast<StackMap::Kind>((*it).GetKind());
    if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
      return *it;
    }
  }
  return stack_maps_.GetInvalidRow();
}

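// Lookup sketch (illustrative; the returned row must be checked for validity):
//   StackMap map = code_info.GetStackMapForNativePcOffset(native_pc_offset, kRuntimeISA);
//   if (map.IsValid()) {
//     uint32_t dex_pc = map.GetDexPc();
//   }
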
// Scan backward to determine dex register locations at the given stack map.
// All registers for a stack map are combined; inlined registers are just appended,
// therefore 'first_dex_register' allows us to select a sub-range to decode.
void CodeInfo::DecodeDexRegisterMap(uint32_t stack_map_index,
                                    uint32_t first_dex_register,
                                    /*out*/ DexRegisterMap* map) const {
  // Count remaining work so we know when we have finished.
  uint32_t remaining_registers = map->size();

  // Keep scanning backwards and collect the most recent location of each register.
  for (int32_t s = stack_map_index; s >= 0 && remaining_registers != 0; s--) {
    StackMap stack_map = GetStackMapAt(s);
    DCHECK_LE(stack_map_index - s, kMaxDexRegisterMapSearchDistance) << "Unbounded search";

    // The mask specifies which registers were modified in this stack map.
    // NB: the mask can be shorter than expected if trailing zero bits were removed.
    uint32_t mask_index = stack_map.GetDexRegisterMaskIndex();
    if (mask_index == StackMap::kNoValue) {
      continue;  // Nothing changed at this stack map.
    }
    BitMemoryRegion mask = dex_register_masks_.GetBitMemoryRegion(mask_index);
    if (mask.size_in_bits() <= first_dex_register) {
      continue;  // Nothing changed after the first register we are interested in.
    }

    // The map stores one catalogue index for each modified register location.
    uint32_t map_index = stack_map.GetDexRegisterMapIndex();
    DCHECK_NE(map_index, StackMap::kNoValue);

    // Skip initial registers which we are not interested in (to get to inlined registers).
    map_index += mask.PopCount(0, first_dex_register);
    mask = mask.Subregion(first_dex_register, mask.size_in_bits() - first_dex_register);

    // Update registers that we see for the first time (i.e. their most recent value).
    DexRegisterLocation* regs = map->data();
    const uint32_t end = std::min<uint32_t>(map->size(), mask.size_in_bits());
    const size_t kNumBits = BitSizeOf<uint32_t>();
    for (uint32_t reg = 0; reg < end; reg += kNumBits) {
      // Process the mask in chunks of kNumBits for performance.
      uint32_t bits = mask.LoadBits(reg, std::min<uint32_t>(end - reg, kNumBits));
      while (bits != 0) {
        uint32_t bit = CTZ(bits);
        if (regs[reg + bit].GetKind() == DexRegisterLocation::Kind::kInvalid) {
          regs[reg + bit] = GetDexRegisterCatalogEntry(dex_register_maps_.Get(map_index));
          remaining_registers--;
        }
        map_index++;
        bits ^= 1u << bit;  // Clear the bit.
      }
    }
  }

  // Set any remaining registers to None (which is the default state at the first stack map).
  if (remaining_registers != 0) {
    DexRegisterLocation* regs = map->data();
    for (uint32_t r = 0; r < map->size(); r++) {
      if (regs[r].GetKind() == DexRegisterLocation::Kind::kInvalid) {
        regs[r] = DexRegisterLocation::None();
      }
    }
  }
}

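// Worked example (hypothetical data): given stack maps S0..S2 where S0 records
// locations for v0 and v1, S1 records v1, and S2 records nothing, decoding at
// S2 scans S2 -> S1 -> S0 and picks up v1 from S1 (the most recent value) and
// v0 from S0; a register never recorded decodes as DexRegisterLocation::None().
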
// Decode the CodeInfo while collecting size statistics.
void CodeInfo::CollectSizeStats(const uint8_t* code_info_data, /*out*/ Stats* parent) {
  Stats* codeinfo_stats = parent->Child("CodeInfo");
  BitMemoryReader reader(code_info_data);
  reader.ReadInterleavedVarints<kNumHeaders>();
  codeinfo_stats->Child("Header")->AddBits(reader.NumberOfReadBits());
  size_t num_bits;
  CodeInfo code_info(code_info_data, &num_bits, [&](size_t i, auto* table, BitMemoryRegion region) {
    if (!code_info.IsBitTableDeduped(i)) {
      Stats* table_stats = codeinfo_stats->Child(table->GetName());
      table_stats->AddBits(region.size_in_bits());
      const char* const* column_names = table->GetColumnNames();
      for (size_t c = 0; c < table->NumColumns(); c++) {
        if (table->NumColumnBits(c) > 0) {
          Stats* column_stats = table_stats->Child(column_names[c]);
          column_stats->AddBits(table->NumRows() * table->NumColumnBits(c), table->NumRows());
        }
      }
    }
  });
  codeinfo_stats->AddBytes(BitsToBytesRoundUp(num_bits));
}

void DexRegisterMap::Dump(VariableIndentationOutputStream* vios) const {
  if (HasAnyLiveDexRegisters()) {
    ScopedIndentation indent1(vios);
    for (size_t i = 0; i < size(); ++i) {
      DexRegisterLocation reg = (*this)[i];
      if (reg.IsLive()) {
        vios->Stream() << "v" << i << ":" << reg << " ";
      }
    }
    vios->Stream() << "\n";
  }
}

void CodeInfo::Dump(VariableIndentationOutputStream* vios,
                    uint32_t code_offset,
                    bool verbose,
                    InstructionSet instruction_set) const {
  vios->Stream() << "CodeInfo"
      << " CodeSize:" << code_size_
      << " FrameSize:" << packed_frame_size_ * kStackAlignment
      << " CoreSpillMask:" << std::hex << core_spill_mask_
      << " FpSpillMask:" << std::hex << fp_spill_mask_
      << " NumberOfDexRegisters:" << std::dec << number_of_dex_registers_
      << "\n";
  ScopedIndentation indent1(vios);
  ForEachBitTableField([this, &vios, verbose](size_t, auto member_pointer) {
    const auto& table = this->*member_pointer;
    if (table.NumRows() != 0) {
      vios->Stream() << table.GetName() << " BitSize=" << table.DataBitSize();
      vios->Stream() << " Rows=" << table.NumRows() << " Bits={";
      const char* const* column_names = table.GetColumnNames();
      for (size_t c = 0; c < table.NumColumns(); c++) {
        vios->Stream() << (c != 0 ? " " : "");
        vios->Stream() << column_names[c] << "=" << table.NumColumnBits(c);
      }
      vios->Stream() << "}\n";
      if (verbose) {
        ScopedIndentation indent1(vios);
        for (size_t r = 0; r < table.NumRows(); r++) {
          vios->Stream() << "[" << std::right << std::setw(3) << r << "]={";
          for (size_t c = 0; c < table.NumColumns(); c++) {
            vios->Stream() << (c != 0 ? " " : "");
            if (&table == static_cast<const void*>(&stack_masks_) ||
                &table == static_cast<const void*>(&dex_register_masks_)) {
              BitMemoryRegion bits = table.GetBitMemoryRegion(r, c);
              for (size_t b = 0, e = bits.size_in_bits(); b < e; b++) {
                vios->Stream() << bits.LoadBit(e - b - 1);
              }
            } else {
              vios->Stream() << std::right << std::setw(8) << static_cast<int32_t>(table.Get(r, c));
            }
          }
          vios->Stream() << "}\n";
        }
      }
    }
  });

  // Display stack maps along with (live) Dex register maps.
  if (verbose) {
    for (StackMap stack_map : stack_maps_) {
      stack_map.Dump(vios, *this, code_offset, instruction_set);
    }
  }
}

void StackMap::Dump(VariableIndentationOutputStream* vios,
                    const CodeInfo& code_info,
                    uint32_t code_offset,
                    InstructionSet instruction_set) const {
  const uint32_t pc_offset = GetNativePcOffset(instruction_set);
  vios->Stream()
      << "StackMap[" << Row() << "]"
      << std::hex
      << " (native_pc=0x" << code_offset + pc_offset
      << ", dex_pc=0x" << GetDexPc()
      << ", register_mask=0x" << code_info.GetRegisterMaskOf(*this)
      << std::dec
      << ", stack_mask=0b";
  BitMemoryRegion stack_mask = code_info.GetStackMaskOf(*this);
  for (size_t i = 0, e = stack_mask.size_in_bits(); i < e; ++i) {
    vios->Stream() << stack_mask.LoadBit(e - i - 1);
  }
  vios->Stream() << ")\n";
  code_info.GetDexRegisterMapOf(*this).Dump(vios);
  for (InlineInfo inline_info : code_info.GetInlineInfosOf(*this)) {
    inline_info.Dump(vios, code_info, *this);
  }
}

void InlineInfo::Dump(VariableIndentationOutputStream* vios,
                      const CodeInfo& code_info,
                      const StackMap& stack_map) const {
  uint32_t depth = Row() - stack_map.GetInlineInfoIndex();
  vios->Stream()
      << "InlineInfo[" << Row() << "]"
      << " (depth=" << depth
      << std::hex
      << ", dex_pc=0x" << GetDexPc();
  if (EncodesArtMethod()) {
    ScopedObjectAccess soa(Thread::Current());
    vios->Stream() << ", method=" << GetArtMethod()->PrettyMethod();
  } else {
    vios->Stream()
        << std::dec
        << ", method_index=" << code_info.GetMethodIndexOf(*this);
  }
  vios->Stream() << ")\n";
  code_info.GetInlineDexRegisterMapOf(stack_map, *this).Dump(vios);
}

}  // namespace art