/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack.h"
#include <limits>

#include "android-base/stringprintf.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/hex_dump.h"
#include "dex/dex_file_types.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "linear_alloc.h"
#include "managed_stack.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "nterp_helpers.h"
#include "oat_quick_method_header.h"
#include "obj_ptr-inl.h"
#include "quick/quick_method_frame_info.h"
#include "runtime.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

using android::base::StringPrintf;

static constexpr bool kDebugStackWalk = false;

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           bool check_suspended)
    : StackVisitor(thread, context, walk_kind, 0, check_suspended) {}

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           size_t num_frames,
                           bool check_suspended)
    : thread_(thread),
      walk_kind_(walk_kind),
      cur_shadow_frame_(nullptr),
      cur_quick_frame_(nullptr),
      cur_quick_frame_pc_(0),
      cur_oat_quick_method_header_(nullptr),
      num_frames_(num_frames),
      cur_depth_(0),
      cur_inline_info_(nullptr, CodeInfo()),
      cur_stack_map_(0, StackMap()),
      context_(context),
      check_suspended_(check_suspended) {
  if (check_suspended_) {
    DCHECK(thread == Thread::Current() || thread->IsSuspended()) << *thread;
  }
}

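// Returns the CodeInfo for the current method header with only its inline info decoded.
// The result is cached per method header so repeated queries on the same frame do not
// decode it again.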
CodeInfo* StackVisitor::GetCurrentInlineInfo() const {
  DCHECK(!(*cur_quick_frame_)->IsNative());
  const OatQuickMethodHeader* header = GetCurrentOatQuickMethodHeader();
  if (cur_inline_info_.first != header) {
    cur_inline_info_ = std::make_pair(header, CodeInfo::DecodeInlineInfoOnly(header));
  }
  return &cur_inline_info_.second;
}

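// Returns the stack map covering the current quick frame PC, cached per PC so that
// repeated lookups while visiting the same frame reuse the previous result.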
StackMap* StackVisitor::GetCurrentStackMap() const {
  DCHECK(!(*cur_quick_frame_)->IsNative());
  const OatQuickMethodHeader* header = GetCurrentOatQuickMethodHeader();
  if (cur_stack_map_.first != cur_quick_frame_pc_) {
    uint32_t pc = header->NativeQuickPcOffset(cur_quick_frame_pc_);
    cur_stack_map_ = std::make_pair(cur_quick_frame_pc_,
                                    GetCurrentInlineInfo()->GetStackMapForNativePcOffset(pc));
  }
  return &cur_stack_map_.second;
}

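// Returns the method of the frame being visited: the shadow frame's method for interpreted
// frames, the innermost inlined method when visiting an inlined frame, or the method stored
// in the quick frame otherwise.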
ArtMethod* StackVisitor::GetMethod() const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetMethod();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      CodeInfo* code_info = GetCurrentInlineInfo();
      DCHECK(walk_kind_ != StackWalkKind::kSkipInlinedFrames);
      return GetResolvedMethod(*GetCurrentQuickFrame(), *code_info, current_inline_frames_);
    } else {
      return *cur_quick_frame_;
    }
  }
  return nullptr;
}

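// Returns the dex pc of the frame being visited. For quick frames it is recovered from the
// inline info, the OatQuickMethodHeader or nterp, depending on how the frame was created.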
uint32_t StackVisitor::GetDexPc(bool abort_on_failure) const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetDexPC();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      return current_inline_frames_.back().GetDexPc();
    } else if (cur_oat_quick_method_header_ == nullptr) {
      return dex::kDexNoIndex;
    } else if ((*GetCurrentQuickFrame())->IsNative()) {
      return cur_oat_quick_method_header_->ToDexPc(
          GetCurrentQuickFrame(), cur_quick_frame_pc_, abort_on_failure);
    } else if (cur_oat_quick_method_header_->IsOptimized()) {
      StackMap* stack_map = GetCurrentStackMap();
      DCHECK(stack_map->IsValid());
      return stack_map->GetDexPc();
    } else {
      DCHECK(cur_oat_quick_method_header_->IsNterpMethodHeader());
      return NterpGetDexPC(cur_quick_frame_);
    }
  } else {
    return 0;
  }
}

extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_);

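// Returns the 'this' reference of the frame being visited, or null for static methods or
// when the receiver cannot be read.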
ObjPtr<mirror::Object> StackVisitor::GetThisObject() const {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  ArtMethod* m = GetMethod();
  if (m->IsStatic()) {
    return nullptr;
  } else if (m->IsNative()) {
    if (cur_quick_frame_ != nullptr) {
      HandleScope* hs = reinterpret_cast<HandleScope*>(
          reinterpret_cast<char*>(cur_quick_frame_) + sizeof(ArtMethod*));
      return hs->GetReference(0);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else if (m->IsProxyMethod()) {
    if (cur_quick_frame_ != nullptr) {
      return artQuickGetProxyThisObject(cur_quick_frame_);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else {
    CodeItemDataAccessor accessor(m->DexInstructionData());
    if (!accessor.HasCodeItem()) {
      UNIMPLEMENTED(ERROR) << "Failed to determine this object of abstract or proxy method: "
          << ArtMethod::PrettyMethod(m);
      return nullptr;
    } else {
      uint16_t reg = accessor.RegistersSize() - accessor.InsSize();
      uint32_t value = 0;
      if (!GetVReg(m, reg, kReferenceVReg, &value)) {
        return nullptr;
      }
      return reinterpret_cast<mirror::Object*>(value);
    }
  }
}

size_t StackVisitor::GetNativePcOffset() const {
  DCHECK(!IsShadowFrame());
  return GetCurrentOatQuickMethodHeader()->NativeQuickPcOffset(cur_quick_frame_pc_);
}

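// Reads a vreg from the debugger-installed shadow frame if the debugger has overridden its
// value. Returns false when there is no overridden value for this vreg.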
bool StackVisitor::GetVRegFromDebuggerShadowFrame(uint16_t vreg,
                                                  VRegKind kind,
                                                  uint32_t* val) const {
  size_t frame_id = const_cast<StackVisitor*>(this)->GetFrameId();
  ShadowFrame* shadow_frame = thread_->FindDebuggerShadowFrame(frame_id);
  if (shadow_frame != nullptr) {
    bool* updated_vreg_flags = thread_->GetUpdatedVRegFlags(frame_id);
    DCHECK(updated_vreg_flags != nullptr);
    if (updated_vreg_flags[vreg]) {
      // Value is set by the debugger.
      if (kind == kReferenceVReg) {
        *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
            shadow_frame->GetVRegReference(vreg)));
      } else {
        *val = shadow_frame->GetVReg(vreg);
      }
      return true;
    }
  }
  // No value is set by the debugger.
  return false;
}

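// Reads vreg 'vreg' of the frame being visited into *val. Debugger overrides take
// precedence; otherwise the value comes from the nterp frame, the optimized code frame
// (with a read barrier applied to references) or the shadow frame.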
bool StackVisitor::GetVReg(ArtMethod* m,
                           uint16_t vreg,
                           VRegKind kind,
                           uint32_t* val,
                           std::optional<DexRegisterLocation> location) const {
  if (cur_quick_frame_ != nullptr) {
    DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
    DCHECK(m == GetMethod());
    // Check if there is a value set by the debugger.
    if (GetVRegFromDebuggerShadowFrame(vreg, kind, val)) {
      return true;
    }
    bool result = false;
    if (cur_oat_quick_method_header_->IsNterpMethodHeader()) {
      result = true;
      *val = (kind == kReferenceVReg)
          ? NterpGetVRegReference(cur_quick_frame_, vreg)
          : NterpGetVReg(cur_quick_frame_, vreg);
    } else {
      DCHECK(cur_oat_quick_method_header_->IsOptimized());
      if (location.has_value() && kind != kReferenceVReg) {
        uint32_t val2 = *val;
        // The caller already knows the register location, so we can use the faster overload
        // which does not decode the stack maps.
        result = GetVRegFromOptimizedCode(location.value(), kind, val);
        // Compare to the slower overload.
        DCHECK_EQ(result, GetVRegFromOptimizedCode(m, vreg, kind, &val2));
        DCHECK_EQ(*val, val2);
      } else {
        result = GetVRegFromOptimizedCode(m, vreg, kind, val);
      }
    }
    if (kind == kReferenceVReg) {
      // Perform a read barrier in case we are in a different thread and GC is ongoing.
      mirror::Object* out = reinterpret_cast<mirror::Object*>(static_cast<uintptr_t>(*val));
      uintptr_t ptr_out = reinterpret_cast<uintptr_t>(GcRoot<mirror::Object>(out).Read());
      DCHECK_LT(ptr_out, std::numeric_limits<uint32_t>::max());
      *val = static_cast<uint32_t>(ptr_out);
    }
    return result;
  } else {
    DCHECK(cur_shadow_frame_ != nullptr);
    if (kind == kReferenceVReg) {
      *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
          cur_shadow_frame_->GetVRegReference(vreg)));
    } else {
      *val = cur_shadow_frame_->GetVReg(vreg);
    }
    return true;
  }
}

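// Reads a vreg from an optimized (compiled) frame by decoding the stack map and dex
// register map for the current native pc.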
bool StackVisitor::GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                            uint32_t* val) const {
  DCHECK_EQ(m, GetMethod());
  // Can't be null or how would we compile its instructions?
  DCHECK(m->GetCodeItem() != nullptr) << m->PrettyMethod();
  CodeItemDataAccessor accessor(m->DexInstructionData());
  uint16_t number_of_dex_registers = accessor.RegistersSize();
  DCHECK_LT(vreg, number_of_dex_registers);
  const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
  CodeInfo code_info(method_header);

  uint32_t native_pc_offset = method_header->NativeQuickPcOffset(cur_quick_frame_pc_);
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
  DCHECK(stack_map.IsValid());

  DexRegisterMap dex_register_map = IsInInlinedFrame()
      ? code_info.GetInlineDexRegisterMapOf(stack_map, current_inline_frames_.back())
      : code_info.GetDexRegisterMapOf(stack_map);
  if (dex_register_map.empty()) {
    return false;
  }
  DCHECK_EQ(dex_register_map.size(), number_of_dex_registers);
  DexRegisterLocation::Kind location_kind = dex_register_map[vreg].GetKind();
  switch (location_kind) {
    case DexRegisterLocation::Kind::kInStack: {
      const int32_t offset = dex_register_map[vreg].GetStackOffsetInBytes();
      BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
      if (kind == kReferenceVReg && !stack_mask.LoadBit(offset / kFrameSlotSize)) {
        return false;
      }
      const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
      *val = *reinterpret_cast<const uint32_t*>(addr);
      return true;
    }
    case DexRegisterLocation::Kind::kInRegister: {
      uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
      uint32_t reg = dex_register_map[vreg].GetMachineRegister();
      if (kind == kReferenceVReg && !(register_mask & (1 << reg))) {
        return false;
      }
      return GetRegisterIfAccessible(reg, kind, val);
    }
    case DexRegisterLocation::Kind::kInRegisterHigh:
    case DexRegisterLocation::Kind::kInFpuRegister:
    case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
      if (kind == kReferenceVReg) {
        return false;
      }
      uint32_t reg = dex_register_map[vreg].GetMachineRegister();
      return GetRegisterIfAccessible(reg, kind, val);
    }
    case DexRegisterLocation::Kind::kConstant: {
      uint32_t result = dex_register_map[vreg].GetConstant();
      if (kind == kReferenceVReg && result != 0) {
        return false;
      }
      *val = result;
      return true;
    }
    case DexRegisterLocation::Kind::kNone:
      return false;
    default:
      LOG(FATAL) << "Unexpected location kind " << dex_register_map[vreg].GetKind();
      UNREACHABLE();
  }
}

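// Faster overload used when the caller already knows the DexRegisterLocation, avoiding the
// stack map decoding done above.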
bool StackVisitor::GetVRegFromOptimizedCode(DexRegisterLocation location,
                                            VRegKind kind,
                                            uint32_t* val) const {
  switch (location.GetKind()) {
    case DexRegisterLocation::Kind::kInvalid:
      break;
    case DexRegisterLocation::Kind::kInStack: {
      const uint8_t* sp = reinterpret_cast<const uint8_t*>(cur_quick_frame_);
      *val = *reinterpret_cast<const uint32_t*>(sp + location.GetStackOffsetInBytes());
      return true;
    }
    case DexRegisterLocation::Kind::kInRegister:
    case DexRegisterLocation::Kind::kInRegisterHigh:
    case DexRegisterLocation::Kind::kInFpuRegister:
    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      return GetRegisterIfAccessible(location.GetMachineRegister(), kind, val);
    case DexRegisterLocation::Kind::kConstant:
      *val = location.GetConstant();
      return true;
    case DexRegisterLocation::Kind::kNone:
      return false;
  }
  LOG(FATAL) << "Unexpected location kind " << location.GetKind();
  UNREACHABLE();
}

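// Reads a physical register from the context, adjusting for ISA specifics (x86 float
// register halves, MIPS double halves) and extracting the requested 32 bits on 64-bit
// targets.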
bool StackVisitor::GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const {
  const bool is_float = (kind == kFloatVReg) || (kind == kDoubleLoVReg) || (kind == kDoubleHiVReg);

  if (kRuntimeISA == InstructionSet::kX86 && is_float) {
    // X86 float registers are 64-bit and each XMM register is provided as two separate
    // 32-bit registers by the context.
    reg = (kind == kDoubleHiVReg) ? (2 * reg + 1) : (2 * reg);
  }

  // MIPS32 float registers are used as 64-bit (for MIPS32r2 it is pair
  // F(2n)-F(2n+1), and for MIPS32r6 it is 64-bit register F(2n)). When
  // accessing upper 32-bits from double, reg + 1 should be used.
  if ((kRuntimeISA == InstructionSet::kMips) && (kind == kDoubleHiVReg)) {
    DCHECK_ALIGNED(reg, 2);
    reg++;
  }

  if (!IsAccessibleRegister(reg, is_float)) {
    return false;
  }
  uintptr_t ptr_val = GetRegister(reg, is_float);
  const bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    const bool wide_lo = (kind == kLongLoVReg) || (kind == kDoubleLoVReg);
    const bool wide_hi = (kind == kLongHiVReg) || (kind == kDoubleHiVReg);
    int64_t value_long = static_cast<int64_t>(ptr_val);
    if (wide_lo) {
      ptr_val = static_cast<uintptr_t>(Low32Bits(value_long));
    } else if (wide_hi) {
      ptr_val = static_cast<uintptr_t>(High32Bits(value_long));
    }
  }
  *val = ptr_val;
  return true;
}

bool StackVisitor::GetVRegPairFromDebuggerShadowFrame(uint16_t vreg,
                                                      VRegKind kind_lo,
                                                      VRegKind kind_hi,
                                                      uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromDebuggerShadowFrame(vreg, kind_lo, &low_32bits);
  success &= GetVRegFromDebuggerShadowFrame(vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

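// Reads a 64-bit (long or double) vreg pair into *val by combining its low and high
// 32-bit halves.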
bool StackVisitor::GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo,
                               VRegKind kind_hi, uint64_t* val) const {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  // Check if there is a value set by the debugger.
  if (GetVRegPairFromDebuggerShadowFrame(vreg, kind_lo, kind_hi, val)) {
    return true;
  }
  if (cur_quick_frame_ == nullptr) {
    DCHECK(cur_shadow_frame_ != nullptr);
    *val = cur_shadow_frame_->GetVRegLong(vreg);
    return true;
  }
  if (cur_oat_quick_method_header_->IsNterpMethodHeader()) {
    uint64_t val_lo = NterpGetVReg(cur_quick_frame_, vreg);
    uint64_t val_hi = NterpGetVReg(cur_quick_frame_, vreg + 1);
    *val = (val_hi << 32) + val_lo;
    return true;
  }

  DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
  DCHECK(m == GetMethod());
  DCHECK(cur_oat_quick_method_header_->IsOptimized());
  return GetVRegPairFromOptimizedCode(m, vreg, kind_lo, kind_hi, val);
}

bool StackVisitor::GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                                VRegKind kind_lo, VRegKind kind_hi,
                                                uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromOptimizedCode(m, vreg, kind_lo, &low_32bits);
  success &= GetVRegFromOptimizedCode(m, vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

bool StackVisitor::GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi,
                                               VRegKind kind_lo, uint64_t* val) const {
  const bool is_float = (kind_lo == kDoubleLoVReg);
  if (!IsAccessibleRegister(reg_lo, is_float) || !IsAccessibleRegister(reg_hi, is_float)) {
    return false;
  }
  uintptr_t ptr_val_lo = GetRegister(reg_lo, is_float);
  uintptr_t ptr_val_hi = GetRegister(reg_hi, is_float);
  bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    int64_t value_long_lo = static_cast<int64_t>(ptr_val_lo);
    int64_t value_long_hi = static_cast<int64_t>(ptr_val_hi);
    ptr_val_lo = static_cast<uintptr_t>(Low32Bits(value_long_lo));
    ptr_val_hi = static_cast<uintptr_t>(High32Bits(value_long_hi));
  }
  *val = (static_cast<uint64_t>(ptr_val_hi) << 32) | static_cast<uint32_t>(ptr_val_lo);
  return true;
}

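// Returns the shadow frame to use for writing a vreg, creating a debugger shadow frame for
// compiled frames so that the new value is picked up when the stack is deoptimized.
// Returns null if the method has no code item.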
ShadowFrame* StackVisitor::PrepareSetVReg(ArtMethod* m, uint16_t vreg, bool wide) {
  CodeItemDataAccessor accessor(m->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return nullptr;
  }
  ShadowFrame* shadow_frame = GetCurrentShadowFrame();
  if (shadow_frame == nullptr) {
    // This is a compiled frame: we must prepare and update a shadow frame that will
    // be executed by the interpreter after deoptimization of the stack.
    const size_t frame_id = GetFrameId();
    const uint16_t num_regs = accessor.RegistersSize();
    shadow_frame = thread_->FindOrCreateDebuggerShadowFrame(frame_id, num_regs, m, GetDexPc());
    CHECK(shadow_frame != nullptr);
    // Remember that the vreg(s) have been set for debugging and must not be overwritten by the
    // original value during deoptimization of the stack.
    thread_->GetUpdatedVRegFlags(frame_id)[vreg] = true;
    if (wide) {
      thread_->GetUpdatedVRegFlags(frame_id)[vreg + 1] = true;
    }
  }
  return shadow_frame;
}

bool StackVisitor::SetVReg(ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind) {
  DCHECK(kind == kIntVReg || kind == kFloatVReg);
  ShadowFrame* shadow_frame = PrepareSetVReg(m, vreg, /* wide= */ false);
  if (shadow_frame == nullptr) {
    return false;
  }
  shadow_frame->SetVReg(vreg, new_value);
  return true;
}

bool StackVisitor::SetVRegReference(ArtMethod* m, uint16_t vreg, ObjPtr<mirror::Object> new_value) {
  ShadowFrame* shadow_frame = PrepareSetVReg(m, vreg, /* wide= */ false);
  if (shadow_frame == nullptr) {
    return false;
  }
  shadow_frame->SetVRegReference(vreg, new_value);
  return true;
}

bool StackVisitor::SetVRegPair(ArtMethod* m,
                               uint16_t vreg,
                               uint64_t new_value,
                               VRegKind kind_lo,
                               VRegKind kind_hi) {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  ShadowFrame* shadow_frame = PrepareSetVReg(m, vreg, /* wide= */ true);
  if (shadow_frame == nullptr) {
    return false;
  }
  shadow_frame->SetVRegLong(vreg, new_value);
  return true;
}

bool StackVisitor::IsAccessibleGPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleGPR(reg);
}

uintptr_t* StackVisitor::GetGPRAddress(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPRAddress(reg);
}

uintptr_t StackVisitor::GetGPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPR(reg);
}

bool StackVisitor::IsAccessibleFPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleFPR(reg);
}

uintptr_t StackVisitor::GetFPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetFPR(reg);
}

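// Address of the return pc slot inside the current quick frame, as described by the frame
// info.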
uintptr_t StackVisitor::GetReturnPcAddr() const {
  uintptr_t sp = reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
  DCHECK_NE(sp, 0u);
  return sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
}

uintptr_t StackVisitor::GetReturnPc() const {
  return *reinterpret_cast<uintptr_t*>(GetReturnPcAddr());
}

void StackVisitor::SetReturnPc(uintptr_t new_ret_pc) {
  *reinterpret_cast<uintptr_t*>(GetReturnPcAddr()) = new_ret_pc;
}

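// Walks the given thread's stack once just to count the number of frames.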
size_t StackVisitor::ComputeNumFrames(Thread* thread, StackWalkKind walk_kind) {
  struct NumFramesVisitor : public StackVisitor {
    NumFramesVisitor(Thread* thread_in, StackWalkKind walk_kind_in)
        : StackVisitor(thread_in, nullptr, walk_kind_in), frames(0) {}

    bool VisitFrame() override {
      frames++;
      return true;
    }

    size_t frames;
  };
  NumFramesVisitor visitor(thread, walk_kind);
  visitor.WalkStack(true);
  return visitor.frames;
}

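// Finds the first non-runtime method (and its dex pc) visited after the frame at the
// current height, i.e. the next frame further out on the stack. Returns false if there is
// no such frame.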
bool StackVisitor::GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc) {
  struct HasMoreFramesVisitor : public StackVisitor {
    HasMoreFramesVisitor(Thread* thread,
                         StackWalkKind walk_kind,
                         size_t num_frames,
                         size_t frame_height)
        : StackVisitor(thread, nullptr, walk_kind, num_frames),
          frame_height_(frame_height),
          found_frame_(false),
          has_more_frames_(false),
          next_method_(nullptr),
          next_dex_pc_(0) {
    }

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (found_frame_) {
        ArtMethod* method = GetMethod();
        if (method != nullptr && !method->IsRuntimeMethod()) {
          has_more_frames_ = true;
          next_method_ = method;
          next_dex_pc_ = GetDexPc();
          return false;  // End stack walk once next method is found.
        }
      } else if (GetFrameHeight() == frame_height_) {
        found_frame_ = true;
      }
      return true;
    }

    size_t frame_height_;
    bool found_frame_;
    bool has_more_frames_;
    ArtMethod* next_method_;
    uint32_t next_dex_pc_;
  };
  HasMoreFramesVisitor visitor(thread_, walk_kind_, GetNumFrames(), GetFrameHeight());
  visitor.WalkStack(true);
  *next_method = visitor.next_method_;
  *next_dex_pc = visitor.next_dex_pc_;
  return visitor.has_more_frames_;
}

void StackVisitor::DescribeStack(Thread* thread) {
  struct DescribeStackVisitor : public StackVisitor {
    explicit DescribeStackVisitor(Thread* thread_in)
        : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      LOG(INFO) << "Frame Id=" << GetFrameId() << " " << DescribeLocation();
      return true;
    }
  };
  DescribeStackVisitor visitor(thread);
  visitor.WalkStack(true);
}

std::string StackVisitor::DescribeLocation() const {
  std::string result("Visiting method '");
  ArtMethod* m = GetMethod();
  if (m == nullptr) {
    return "upcall";
  }
  result += m->PrettyMethod();
  result += StringPrintf("' at dex PC 0x%04x", GetDexPc());
  if (!IsShadowFrame()) {
    result += StringPrintf(" (native PC %p)", reinterpret_cast<void*>(GetCurrentQuickFramePc()));
  }
  return result;
}

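// Replaces the ArtMethod of the frame being visited, either in the shadow frame or in the
// quick frame. Not supported for inlined frames.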
void StackVisitor::SetMethod(ArtMethod* method) {
  DCHECK(GetMethod() != nullptr);
  if (cur_shadow_frame_ != nullptr) {
    cur_shadow_frame_->SetMethod(method);
  } else {
    DCHECK(cur_quick_frame_ != nullptr);
    CHECK(!IsInInlinedFrame()) << "We do not support setting inlined method's ArtMethod: "
                               << GetMethod()->PrettyMethod() << " is inlined into "
                               << GetOuterMethod()->PrettyMethod();
    *cur_quick_frame_ = method;
  }
}

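// Debug check that 'pc' can legitimately belong to 'method': within its quick code, within
// JIT-compiled code, or at one of the runtime stubs and trampolines.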
static void AssertPcIsWithinQuickCode(ArtMethod* method, uintptr_t pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (method->IsNative() || method->IsRuntimeMethod() || method->IsProxyMethod()) {
    return;
  }

  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }

  Runtime* runtime = Runtime::Current();
  if (runtime->UseJitCompilation() &&
      runtime->GetJit()->GetCodeCache()->ContainsPc(reinterpret_cast<const void*>(pc))) {
    return;
  }

  const void* code = method->GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint() || code == GetInvokeObsoleteMethodStub()) {
    return;
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }

  if (runtime->UseJitCompilation() && runtime->GetJit()->GetCodeCache()->ContainsPc(code)) {
    return;
  }

  uint32_t code_size = OatQuickMethodHeader::FromEntryPoint(code)->GetCodeSize();
  uintptr_t code_start = reinterpret_cast<uintptr_t>(code);
  CHECK(code_start <= pc && pc <= (code_start + code_size))
      << method->PrettyMethod()
      << " pc=" << std::hex << pc
      << " code_start=" << code_start
      << " code_size=" << code_size;
}

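// Debug-build consistency checks on the frame being visited: the method must live in a
// linear alloc or an image space, and quick frames must have a plausible frame size and
// return pc offset.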
void StackVisitor::SanityCheckFrame() const {
  if (kIsDebugBuild) {
    ArtMethod* method = GetMethod();
    ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
    // Runtime methods have null declaring class.
    if (!method->IsRuntimeMethod()) {
      CHECK(declaring_class != nullptr);
      CHECK_EQ(declaring_class->GetClass(), declaring_class->GetClass()->GetClass())
          << declaring_class;
    } else {
      CHECK(declaring_class == nullptr);
    }
    Runtime* const runtime = Runtime::Current();
    LinearAlloc* const linear_alloc = runtime->GetLinearAlloc();
    if (!linear_alloc->Contains(method)) {
      // Check class linker linear allocs.
      // We get the canonical method as copied methods may have their declaring
      // class from another class loader.
      const PointerSize ptrSize = runtime->GetClassLinker()->GetImagePointerSize();
      ArtMethod* canonical = method->GetCanonicalMethod(ptrSize);
      ObjPtr<mirror::Class> klass = canonical->GetDeclaringClass();
      LinearAlloc* const class_linear_alloc = (klass != nullptr)
          ? runtime->GetClassLinker()->GetAllocatorForClassLoader(klass->GetClassLoader())
          : linear_alloc;
      if (!class_linear_alloc->Contains(canonical)) {
        // Check image space.
        bool in_image = false;
        for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
          if (space->IsImageSpace()) {
            auto* image_space = space->AsImageSpace();
            const auto& header = image_space->GetImageHeader();
            const ImageSection& methods = header.GetMethodsSection();
            const ImageSection& runtime_methods = header.GetRuntimeMethodsSection();
            const size_t offset = reinterpret_cast<const uint8_t*>(canonical) - image_space->Begin();
            if (methods.Contains(offset) || runtime_methods.Contains(offset)) {
              in_image = true;
              break;
            }
          }
        }
        CHECK(in_image) << canonical->PrettyMethod() << " not in linear alloc or image";
      }
    }
    if (cur_quick_frame_ != nullptr) {
      AssertPcIsWithinQuickCode(method, cur_quick_frame_pc_);
      // Frame sanity.
      size_t frame_size = GetCurrentQuickFrameInfo().FrameSizeInBytes();
      CHECK_NE(frame_size, 0u);
      // For compiled code, we could try to have a rough guess at an upper size we expect
      // to see for a frame:
      // 256 registers
      // 2 words HandleScope overhead
      // 3+3 register spills
      // const size_t kMaxExpectedFrameSize = (256 + 2 + 3 + 3) * sizeof(word);
      const size_t kMaxExpectedFrameSize = interpreter::kMaxNterpFrame;
      CHECK_LE(frame_size, kMaxExpectedFrameSize) << method->PrettyMethod();
      size_t return_pc_offset = GetCurrentQuickFrameInfo().GetReturnPcOffset();
      CHECK_LT(return_pc_offset, frame_size);
    }
  }
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: This does _not_ include "this" for non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  for (uint32_t i = 1; i < shorty_len; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

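// Returns the frame layout (size and spill masks) of the current quick frame, covering
// optimized code, nterp, runtime methods, proxy methods and the generic JNI stub.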
| 790 | QuickMethodFrameInfo StackVisitor::GetCurrentQuickFrameInfo() const { |
| 791 | if (cur_oat_quick_method_header_ != nullptr) { |
Nicolas Geoffray | 013d1ee | 2019-12-04 16:18:15 +0000 | [diff] [blame] | 792 | if (cur_oat_quick_method_header_->IsOptimized()) { |
| 793 | return cur_oat_quick_method_header_->GetFrameInfo(); |
| 794 | } else { |
| 795 | DCHECK(cur_oat_quick_method_header_->IsNterpMethodHeader()); |
| 796 | return NterpFrameInfo(cur_quick_frame_); |
| 797 | } |
Nicolas Geoffray | 524e7ea | 2015-10-16 17:13:34 +0100 | [diff] [blame] | 798 | } |
| 799 | |
| 800 | ArtMethod* method = GetMethod(); |
| 801 | Runtime* runtime = Runtime::Current(); |
| 802 | |
| 803 | if (method->IsAbstract()) { |
Vladimir Marko | d3083dd | 2018-05-17 08:43:47 +0100 | [diff] [blame] | 804 | return RuntimeCalleeSaveFrame::GetMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs); |
Nicolas Geoffray | 524e7ea | 2015-10-16 17:13:34 +0100 | [diff] [blame] | 805 | } |
| 806 | |
| 807 | // This goes before IsProxyMethod since runtime methods have a null declaring class. |
| 808 | if (method->IsRuntimeMethod()) { |
| 809 | return runtime->GetRuntimeMethodFrameInfo(method); |
| 810 | } |
| 811 | |
Nicolas Geoffray | 524e7ea | 2015-10-16 17:13:34 +0100 | [diff] [blame] | 812 | if (method->IsProxyMethod()) { |
    // There is only one direct method in a proxy class: the constructor. A direct method is
    // cloned from the original java.lang.reflect.Proxy class and is executed as a usual
    // quick-compiled method without any stubs, so it must have an OatQuickMethodHeader.
    DCHECK(!method->IsDirect() && !method->IsConstructor())
        << "Constructors of proxy classes must have an OatQuickMethodHeader";
    return RuntimeCalleeSaveFrame::GetMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
  }

  // The only remaining case is if the method is native and uses the generic JNI stub,
  // called either directly or through some (resolution, instrumentation) trampoline.
  DCHECK(method->IsNative());
  if (kIsDebugBuild) {
    ClassLinker* class_linker = runtime->GetClassLinker();
    const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(method,
                                                                             kRuntimePointerSize);
    CHECK(class_linker->IsQuickGenericJniStub(entry_point) ||
          // The current entrypoint (after filtering out trampolines) may have changed
          // from GenericJNI to JIT-compiled stub since we have entered this frame.
          (runtime->GetJit() != nullptr &&
           runtime->GetJit()->GetCodeCache()->ContainsPc(entry_point))) << method->PrettyMethod();
  }
  // Generic JNI frame.
  uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(method) + 1;
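  // The extra handle accounts for the receiver of an instance method or the declaring class of
  // a static one, which the generic JNI trampoline also places in the handle scope; e.g. an
  // instance native method with two reference arguments needs three handles.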
  size_t scope_size = HandleScope::SizeOf(handle_refs);
  constexpr QuickMethodFrameInfo callee_info =
      RuntimeCalleeSaveFrame::GetMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);

  // Callee saves + handle scope + method ref + alignment
  // Note: -sizeof(void*) since callee-save frame stores a whole method pointer.
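  // Since sizeof(ArtMethod*) == sizeof(void*), the two terms cancel out; they are kept to
  // document that the callee-save frame already reserves the slot for the method pointer.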
  size_t frame_size = RoundUp(
      callee_info.FrameSizeInBytes() - sizeof(void*) + sizeof(ArtMethod*) + scope_size,
      kStackAlignment);
  return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
}

template <StackVisitor::CountTransitions kCount>
void StackVisitor::WalkStack(bool include_transitions) {
  if (check_suspended_) {
    DCHECK(thread_ == Thread::Current() || thread_->IsSuspended());
  }
  CHECK_EQ(cur_depth_, 0U);
  size_t inlined_frames_count = 0;

  for (const ManagedStack* current_fragment = thread_->GetManagedStack();
       current_fragment != nullptr; current_fragment = current_fragment->GetLink()) {
    cur_shadow_frame_ = current_fragment->GetTopShadowFrame();
    cur_quick_frame_ = current_fragment->GetTopQuickFrame();
    cur_quick_frame_pc_ = 0;
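    // The PC of the top frame of a fragment is unknown at this point; for the caller frames
    // further down it is recovered from the return PC stored in the callee's frame as we walk.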
    DCHECK(cur_oat_quick_method_header_ == nullptr);
    if (cur_quick_frame_ != nullptr) {  // Handle quick stack frames.
      // Can't be both a shadow and a quick fragment.
      DCHECK(current_fragment->GetTopShadowFrame() == nullptr);
      ArtMethod* method = *cur_quick_frame_;
      DCHECK(method != nullptr);
      bool header_retrieved = false;
      if (method->IsNative()) {
        // We do not have a PC for the first frame, so we cannot simply use
        // ArtMethod::GetOatQuickMethodHeader() as we're unable to distinguish there
        // between GenericJNI frame and JIT-compiled JNI stub; the entrypoint may have
        // changed since the frame was entered. The top quick frame tag indicates
        // GenericJNI here, otherwise it's either AOT-compiled or JNI-compiled JNI stub.
        if (UNLIKELY(current_fragment->GetTopQuickFrameTag())) {
          // The generic JNI does not have any method header.
          cur_oat_quick_method_header_ = nullptr;
        } else {
          const void* existing_entry_point = method->GetEntryPointFromQuickCompiledCode();
          CHECK(existing_entry_point != nullptr);
          Runtime* runtime = Runtime::Current();
          ClassLinker* class_linker = runtime->GetClassLinker();
          // Check whether we can quickly get the header from the current entrypoint.
          if (!class_linker->IsQuickGenericJniStub(existing_entry_point) &&
              !class_linker->IsQuickResolutionStub(existing_entry_point) &&
              existing_entry_point != GetQuickInstrumentationEntryPoint()) {
            cur_oat_quick_method_header_ =
                OatQuickMethodHeader::FromEntryPoint(existing_entry_point);
          } else {
            const void* code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
            if (code != nullptr) {
              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromEntryPoint(code);
            } else {
              // This must be a JITted JNI stub frame.
              CHECK(runtime->GetJit() != nullptr);
              code = runtime->GetJit()->GetCodeCache()->GetJniStubCode(method);
              CHECK(code != nullptr) << method->PrettyMethod();
              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromCodePointer(code);
            }
          }
        }
        header_retrieved = true;
      }
      while (method != nullptr) {
        if (!header_retrieved) {
          cur_oat_quick_method_header_ = method->GetOatQuickMethodHeader(cur_quick_frame_pc_);
        }
        header_retrieved = false;  // Force header retrieval in next iteration.
        SanityCheckFrame();

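        // If requested, visit every frame inlined at the current PC before visiting the
        // physical frame itself.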
        if ((walk_kind_ == StackWalkKind::kIncludeInlinedFrames)
            && (cur_oat_quick_method_header_ != nullptr)
            && cur_oat_quick_method_header_->IsOptimized()
            && !method->IsNative()  // JNI methods cannot have any inlined frames.
            && CodeInfo::HasInlineInfo(cur_oat_quick_method_header_->GetOptimizedCodeInfoPtr())) {
          DCHECK_NE(cur_quick_frame_pc_, 0u);
          CodeInfo* code_info = GetCurrentInlineInfo();
          StackMap* stack_map = GetCurrentStackMap();
          if (stack_map->IsValid() && stack_map->HasInlineInfo()) {
            DCHECK_EQ(current_inline_frames_.size(), 0u);
            for (current_inline_frames_ = code_info->GetInlineInfosOf(*stack_map);
                 !current_inline_frames_.empty();
                 current_inline_frames_.pop_back()) {
              bool should_continue = VisitFrame();
              if (UNLIKELY(!should_continue)) {
                return;
              }
              cur_depth_++;
              inlined_frames_count++;
            }
          }
        }

        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }

        QuickMethodFrameInfo frame_info = GetCurrentQuickFrameInfo();
        if (context_ != nullptr) {
          context_->FillCalleeSaves(reinterpret_cast<uint8_t*>(cur_quick_frame_), frame_info);
        }
        // Compute PC for next stack frame from return PC.
        size_t frame_size = frame_info.FrameSizeInBytes();
        uintptr_t return_pc_addr = GetReturnPcAddr();
        uintptr_t return_pc = *reinterpret_cast<uintptr_t*>(return_pc_addr);

        if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == return_pc)) {
          // When instrumentation is active, the actual return pc is restored from the
          // instrumentation side stack, except when walking the stack for an exception,
          // where the side stack will be unwound in VisitFrame.
          const std::map<uintptr_t, instrumentation::InstrumentationStackFrame>&
              instrumentation_stack = *thread_->GetInstrumentationStack();
          auto it = instrumentation_stack.find(return_pc_addr);
          CHECK(it != instrumentation_stack.end());
          const instrumentation::InstrumentationStackFrame& instrumentation_frame = it->second;
          if (GetMethod() ==
              Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves)) {
            // Skip the runtime's SaveAllCalleeSaves frames, which are used to deliver exceptions.
          } else if (instrumentation_frame.interpreter_entry_) {
            ArtMethod* callee =
                Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
            CHECK_EQ(GetMethod(), callee) << "Expected: " << ArtMethod::PrettyMethod(callee)
                                          << " Found: " << ArtMethod::PrettyMethod(GetMethod());
          } else if (!instrumentation_frame.method_->IsRuntimeMethod()) {
            // Trampolines get replaced with their actual method in the stack,
            // so don't do the check below for runtime methods.
            // Instrumentation generally doesn't distinguish between a method's obsolete and
            // non-obsolete version.
            CHECK_EQ(instrumentation_frame.method_->GetNonObsoleteMethod(),
                     GetMethod()->GetNonObsoleteMethod())
                << "Expected: "
                << ArtMethod::PrettyMethod(instrumentation_frame.method_->GetNonObsoleteMethod())
                << " Found: " << ArtMethod::PrettyMethod(GetMethod()->GetNonObsoleteMethod());
          }
          return_pc = instrumentation_frame.return_pc_;
        }

        cur_quick_frame_pc_ = return_pc;
        uint8_t* next_frame = reinterpret_cast<uint8_t*>(cur_quick_frame_) + frame_size;
        cur_quick_frame_ = reinterpret_cast<ArtMethod**>(next_frame);
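        // cur_quick_frame_ now points at the caller's ArtMethod* slot, which sits at the
        // caller's stack pointer just above the current frame in memory (the stack grows down).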

        if (kDebugStackWalk) {
          LOG(INFO) << ArtMethod::PrettyMethod(method) << "@" << method << " size=" << frame_size
              << std::boolalpha
              << " optimized=" << (cur_oat_quick_method_header_ != nullptr &&
                                   cur_oat_quick_method_header_->IsOptimized())
              << " native=" << method->IsNative()
              << std::noboolalpha
              << " entrypoints=" << method->GetEntryPointFromQuickCompiledCode()
              << "," << (method->IsNative() ? method->GetEntryPointFromJni() : nullptr)
              << " next=" << *cur_quick_frame_;
        }

        if (kCount == CountTransitions::kYes || !method->IsRuntimeMethod()) {
          cur_depth_++;
        }
        method = *cur_quick_frame_;
      }
      // We reached a transition frame; it doesn't have a method header.
      cur_oat_quick_method_header_ = nullptr;
    } else if (cur_shadow_frame_ != nullptr) {
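      // Shadow (interpreter) frames of this fragment form a singly linked list from the most
      // recent frame to its callers; visit each of them in turn.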
      do {
        SanityCheckFrame();
        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }
        cur_depth_++;
        cur_shadow_frame_ = cur_shadow_frame_->GetLink();
      } while (cur_shadow_frame_ != nullptr);
    }
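    // If requested, also report the transition between managed stack fragments; visitors
    // typically recognize it by GetMethod() returning null here.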
    if (include_transitions) {
      bool should_continue = VisitFrame();
      if (!should_continue) {
        return;
      }
    }
    if (kCount == CountTransitions::kYes) {
      cur_depth_++;
    }
  }
  if (num_frames_ != 0) {
    CHECK_EQ(cur_depth_, num_frames_);
  }
}

template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kYes>(bool);
template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kNo>(bool);
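
// WalkStack() is defined in this translation unit, so the two CountTransitions modes are
// instantiated explicitly above. For orientation, a rough sketch of how a visitor drives the
// walk (hypothetical example, not part of the runtime):
//
//   class CountingVisitor : public StackVisitor {
//    public:
//     CountingVisitor(Thread* thread, Context* context)
//         : StackVisitor(thread, context, StackWalkKind::kIncludeInlinedFrames) {}
//     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
//       ++count_;
//       return true;  // Returning false stops the walk early.
//     }
//     size_t count_ = 0;
//   };
//
//   // With the mutator lock held (shared) and the target thread current or suspended:
//   CountingVisitor visitor(thread, /* context= */ nullptr);
//   visitor.WalkStack();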

}  // namespace art