blob: 63f43cf3b24b9f216066434a3ffc066d8f22b001 [file] [log] [blame]
Sebastien Hertzd45a1f52014-01-09 14:56:54 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020017#include "quick_exception_handler.h"
18
Ian Rogerse63db272014-07-15 15:36:11 -070019#include "arch/context.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Ian Rogers5cf98192014-05-29 21:31:50 -070021#include "dex_instruction.h"
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020022#include "entrypoints/entrypoint_utils.h"
Andreas Gampe639bdd12015-06-03 11:22:45 -070023#include "entrypoints/quick/quick_entrypoints_enum.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070024#include "entrypoints/runtime_asm_entrypoints.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070025#include "handle_scope-inl.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070026#include "mirror/class-inl.h"
27#include "mirror/class_loader.h"
28#include "mirror/throwable.h"
Ian Rogers5cf98192014-05-29 21:31:50 -070029#include "verifier/method_verifier.h"
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010030
31namespace art {
32
// When true, logs exception-delivery details (exception type/message, chosen
// handler and its source line) and deoptimization dumps. Compile-time flag.
static constexpr bool kDebugExceptionDelivery = false;
// Sentinel depth used before the stack walk has located the handler frame.
static constexpr size_t kInvalidFrameDepth = 0xffffffff;
Ian Rogers5cf98192014-05-29 21:31:50 -070035
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020036QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
37 : self_(self), context_(self->GetLongJumpContext()), is_deoptimization_(is_deoptimization),
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010038 method_tracing_active_(is_deoptimization ||
39 Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
Andreas Gampe639bdd12015-06-03 11:22:45 -070040 handler_quick_frame_(nullptr), handler_quick_frame_pc_(0), handler_quick_arg0_(0),
41 handler_method_(nullptr), handler_dex_pc_(0), clear_exception_(false),
42 handler_frame_depth_(kInvalidFrameDepth) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010043}
44
// Finds catch handler.
// Walks the stack from the throwing frame outwards, recording into the
// QuickExceptionHandler the first frame whose method declares a catch block
// matching the exception's class, or the upcall if none is found.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler) {
  }

  // Called once per frame; returns false to stop the walk once a handler (or
  // the upcall) has been recorded.
  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall, we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check exception handler is set up for the unhandled exception handler
        // case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  // Looks up a catch block for the exception's class at this frame's dex pc.
  // Returns false (stop walking) when one is found; true to keep unwinding.
  bool HandleTryItems(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
    // Native methods have no dex pc and therefore no try items to search.
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        // Handler found: record method, dex pc, and the native pc to jump to.
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            method->ToNativeQuickPc(found_dex_pc, /* is_catch_handler */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};
126
// Locates the catch handler (or upcall) for |exception| by walking the stack,
// restores the exception on the thread unless delivery cleared it, and, for
// handlers in optimized code, populates the catch block's phi inputs.
void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
                     << ": " << str_msg << "\n");
  }
  // Wrap the exception in a handle so it stays visible to the GC while we walk.
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find catch handler.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (*handler_quick_frame_ == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
    }
  }
  if (clear_exception_) {
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
  // If the handler is in optimized code, we need to set the catch environment.
  if (*handler_quick_frame_ != nullptr &&
      handler_method_ != nullptr &&
      handler_method_->IsOptimized(sizeof(void*))) {
    SetCatchEnvironmentForOptimizedHandler(&visitor);
  }
}
166
167static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
168 // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
169 // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
170 // distinguish between core/FPU registers and low/high bits on 64-bit.
171 switch (kind) {
172 case DexRegisterLocation::Kind::kConstant:
173 case DexRegisterLocation::Kind::kInStack:
174 // VRegKind is ignored.
175 return VRegKind::kUndefined;
176
177 case DexRegisterLocation::Kind::kInRegister:
178 // Selects core register. For 64-bit registers, selects low 32 bits.
179 return VRegKind::kLongLoVReg;
180
181 case DexRegisterLocation::Kind::kInRegisterHigh:
182 // Selects core register. For 64-bit registers, selects high 32 bits.
183 return VRegKind::kLongHiVReg;
184
185 case DexRegisterLocation::Kind::kInFpuRegister:
186 // Selects FPU register. For 64-bit registers, selects low 32 bits.
187 return VRegKind::kDoubleLoVReg;
188
189 case DexRegisterLocation::Kind::kInFpuRegisterHigh:
190 // Selects FPU register. For 64-bit registers, selects high 32 bits.
191 return VRegKind::kDoubleHiVReg;
192
193 default:
194 LOG(FATAL) << "Unexpected vreg location "
195 << DexRegisterLocation::PrettyDescriptor(kind);
196 UNREACHABLE();
197 }
198}
199
// Copies the live dex-register values from the throwing instruction's stack
// map into the catch block's expected stack slots (the catch phi inputs), so
// that optimized code can resume directly at the handler.
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_->IsOptimized(sizeof(void*)));

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Setting catch phis: ");
  }

  const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
  CodeInfo code_info = handler_method_->GetOptimizedCodeInfo();
  StackMapEncoding encoding = code_info.ExtractEncoding();

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);

  // Copy values between them.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location =
        catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
    // A vreg with no location at the catch point is dead there; skip it.
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
    // Catch phi inputs are always materialized in stack slots.
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
                                                                   number_of_vregs,
                                                                   code_info,
                                                                   encoding));
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << PrettyMethod(stack_visitor->GetMethod()) << ", "
                            << "dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
                                                               number_of_vregs,
                                                               code_info,
                                                               encoding);
    // Slot offsets are relative to the top of the handler's quick frame.
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}
261
// Prepares deoptimization.
// Walks compiled frames and, for each one, reconstructs an interpreter
// ShadowFrame holding the frame's dex-register values (recovered via the
// method verifier's per-dex-pc register descriptions). In single-frame mode
// the walk stops after the first non-inlined compiled frame.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false) {
  }

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    if (method == nullptr || single_frame_done_) {
      // This is the upcall (or the next full frame in single-frame deopt), we remember the frame
      // and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      if (!stacked_shadow_frame_pushed_) {
        // In case there is no deoptimized shadow frame for this upcall, we still
        // need to push a nullptr to the stack since there is always a matching pop after
        // the long jump.
        GetThread()->PushStackedShadowFrame(nullptr,
                                            StackedShadowFrameType::kDeoptimizationShadowFrame);
        stacked_shadow_frame_pushed_ = true;
      }
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method.
      // The top method is a runtime method, the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      return true;
    } else {
      HandleDeoptimization(method);
      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(method));
        single_frame_done_ = true;
      }
      return true;
    }
  }

 private:
  // DescribeVRegs() returns (kind, payload) pairs per register; the kind of
  // register |reg| is at even index reg * 2.
  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  // Builds a ShadowFrame for method |m| at the current dex pc, filling each
  // dex register from the compiled frame according to the verifier's
  // description, and links it to the previously built frame (or pushes it as
  // the bottom of the deoptimization chain).
  void HandleDeoptimization(ArtMethod* m) SHARED_REQUIRES(Locks::mutator_lock_) {
    const DexFile::CodeItem* code_item = m->GetCodeItem();
    CHECK(code_item != nullptr) << "No code item for " << PrettyMethod(m);
    uint16_t num_regs = code_item->registers_size_;
    uint32_t dex_pc = GetDexPc();
    StackHandleScope<2> hs(GetThread());  // Dex cache and class loader.
    mirror::Class* declaring_class = m->GetDeclaringClass();
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
    // Re-verify the method to obtain the dex-register kinds at |dex_pc|.
    verifier::MethodVerifier verifier(GetThread(), h_dex_cache->GetDexFile(), h_dex_cache,
                                      h_class_loader, &m->GetClassDef(), code_item,
                                      m->GetDexMethodIndex(), m, m->GetAccessFlags(), true, true,
                                      true, true);
    bool verifier_success = verifier.Verify();
    CHECK(verifier_success) << PrettyMethod(m);
    // Check if a shadow frame already exists for debugger's set-local-value purpose.
    const size_t frame_id = GetFrameId();
    ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
    const bool* updated_vregs;
    if (new_frame == nullptr) {
      new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, m, dex_pc);
      updated_vregs = nullptr;
    } else {
      updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
      DCHECK(updated_vregs != nullptr);
    }
    {
      // Keep the under-construction frame visible to the GC while we fill it.
      ScopedStackedShadowFramePusher pusher(GetThread(), new_frame,
                                            StackedShadowFrameType::kShadowFrameUnderConstruction);
      const std::vector<int32_t> kinds(verifier.DescribeVRegs(dex_pc));

      // Markers for dead values, used when the verifier knows a Dex register is undefined,
      // or when the compiler knows the register has not been initialized, or is not used
      // anymore in the method.
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      static constexpr uint64_t kLongDeadValue = 0xEBADDE09EBADDE09;
      for (uint16_t reg = 0; reg < num_regs; ++reg) {
        if (updated_vregs != nullptr && updated_vregs[reg]) {
          // Keep the value set by debugger.
          continue;
        }
        VRegKind kind = GetVRegKind(reg, kinds);
        switch (kind) {
          case kUndefined:
            new_frame->SetVReg(reg, kDeadValue);
            break;
          case kConstant:
            // The constant's payload is stored at the odd index reg * 2 + 1.
            new_frame->SetVReg(reg, kinds.at((reg * 2) + 1));
            break;
          case kReferenceVReg: {
            uint32_t value = 0;
            // Check IsReferenceVReg in case the compiled GC map doesn't agree with the verifier.
            // We don't want to copy a stale reference into the shadow frame as a reference.
            // b/20736048
            if (GetVReg(m, reg, kind, &value) && IsReferenceVReg(m, reg)) {
              new_frame->SetVRegReference(reg, reinterpret_cast<mirror::Object*>(value));
            } else {
              new_frame->SetVReg(reg, kDeadValue);
            }
            break;
          }
          case kLongLoVReg:
            if (GetVRegKind(reg + 1, kinds) == kLongHiVReg) {
              // Treat it as a "long" register pair.
              uint64_t value = 0;
              if (GetVRegPair(m, reg, kLongLoVReg, kLongHiVReg, &value)) {
                new_frame->SetVRegLong(reg, value);
              } else {
                new_frame->SetVRegLong(reg, kLongDeadValue);
              }
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          case kLongHiVReg:
            if (GetVRegKind(reg - 1, kinds) == kLongLoVReg) {
              // Nothing to do: we treated it as a "long" register pair.
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          case kDoubleLoVReg:
            if (GetVRegKind(reg + 1, kinds) == kDoubleHiVReg) {
              uint64_t value = 0;
              if (GetVRegPair(m, reg, kDoubleLoVReg, kDoubleHiVReg, &value)) {
                // Treat it as a "double" register pair.
                new_frame->SetVRegLong(reg, value);
              } else {
                new_frame->SetVRegLong(reg, kLongDeadValue);
              }
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          case kDoubleHiVReg:
            if (GetVRegKind(reg - 1, kinds) == kDoubleLoVReg) {
              // Nothing to do: we treated it as a "double" register pair.
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          default:
            // All remaining kinds are plain 32-bit values.
            uint32_t value = 0;
            if (GetVReg(m, reg, kind, &value)) {
              new_frame->SetVReg(reg, value);
            } else {
              new_frame->SetVReg(reg, kDeadValue);
            }
            break;
        }
      }
    }
    if (updated_vregs != nullptr) {
      // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
      // array so this must come after we processed the frame.
      GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
      DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
    }
    if (prev_shadow_frame_ != nullptr) {
      prev_shadow_frame_->SetLink(new_frame);
    } else {
      // Will be popped after the long jump after DeoptimizeStack(),
      // right before interpreter::EnterInterpreterFromDeoptimize().
      stacked_shadow_frame_pushed_ = true;
      GetThread()->PushStackedShadowFrame(
          new_frame,
          single_frame_deopt_
              ? StackedShadowFrameType::kSingleFrameDeoptimizationShadowFrame
              : StackedShadowFrameType::kDeoptimizationShadowFrame);
    }
    prev_shadow_frame_ = new_frame;
  }

  QuickExceptionHandler* const exception_handler_;
  // Most recently built shadow frame; the next one is linked behind it.
  ShadowFrame* prev_shadow_frame_;
  // Whether the bottom of the deoptimization chain was pushed on the thread.
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};
482
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200483void QuickExceptionHandler::DeoptimizeStack() {
484 DCHECK(is_deoptimization_);
Ian Rogers5cf98192014-05-29 21:31:50 -0700485 if (kDebugExceptionDelivery) {
486 self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
487 }
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200488
Andreas Gampe639bdd12015-06-03 11:22:45 -0700489 DeoptimizeStackVisitor visitor(self_, context_, this, false);
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200490 visitor.WalkStack(true);
491
492 // Restore deoptimization exception
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000493 self_->SetException(Thread::GetDeoptimizationException());
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100494}
495
Andreas Gampe639bdd12015-06-03 11:22:45 -0700496void QuickExceptionHandler::DeoptimizeSingleFrame() {
497 DCHECK(is_deoptimization_);
498
499 if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
500 LOG(INFO) << "Single-frame deopting:";
501 DumpFramesWithType(self_, true);
502 }
503
504 DeoptimizeStackVisitor visitor(self_, context_, this, true);
505 visitor.WalkStack(true);
506
507 // PC needs to be of the quick-to-interpreter bridge.
508 int32_t offset;
509 #ifdef __LP64__
510 offset = GetThreadOffset<8>(kQuickQuickToInterpreterBridge).Int32Value();
511 #else
512 offset = GetThreadOffset<4>(kQuickQuickToInterpreterBridge).Int32Value();
513 #endif
514 handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
515 reinterpret_cast<uint8_t*>(self_) + offset);
516}
517
518void QuickExceptionHandler::DeoptimizeSingleFrameArchDependentFixup() {
519 // Architecture-dependent work. This is to get the LR right for x86 and x86-64.
520
521 if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
522 // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
523 // change how longjump works.
524 handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
525 reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
526 }
527}
528
// Unwinds all instrumentation stack frame prior to catch handler or upcall.
// Counts how many instrumentation exit stubs sit on frames shallower than
// |frame_depth| (the handler's depth), so the caller can pop exactly that
// many entries from the instrumentation stack.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    // The handler depth must have been set by a prior stack walk.
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() SHARED_REQUIRES(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
      // A return pc equal to the instrumentation exit stub marks an instrumented frame.
      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
        if (!IsInInlinedFrame()) {
          // We do not count inlined frames, because we do not instrument them. The reason we
          // include them in the stack walking is the check against `frame_depth_`, which is
          // given to us by a visitor that visits inlined frames.
          ++instrumentation_frames_to_pop_;
        }
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  // Number of instrumentation stack entries that must be popped for unwind.
  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};
569
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200570void QuickExceptionHandler::UpdateInstrumentationStack() {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100571 if (method_tracing_active_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700572 InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100573 visitor.WalkStack(true);
574
575 size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
576 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
577 for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
578 instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
579 }
580 }
581}
582
Andreas Gampe639bdd12015-06-03 11:22:45 -0700583void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100584 // Place context back on thread so it will be available when we continue.
585 self_->ReleaseLongJumpContext(context_);
586 context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
587 CHECK_NE(handler_quick_frame_pc_, 0u);
588 context_->SetPC(handler_quick_frame_pc_);
Andreas Gampe639bdd12015-06-03 11:22:45 -0700589 context_->SetArg0(handler_quick_arg0_);
590 if (smash_caller_saves) {
591 context_->SmashCallerSaves();
592 }
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100593 context_->DoLongJump();
Andreas Gampe794ad762015-02-23 08:12:24 -0800594 UNREACHABLE();
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100595}
596
// Prints out methods with their type of frame.
// Each frame is tagged: N = transition (no method), R = runtime method,
// S = shadow (interpreter) frame, Q = quick (compiled) frame, with a trailing
// "i" for inlined quick frames. |show_details| adds pc/frame/return addresses.
class DumpFramesWithTypeStackVisitor FINAL : public StackVisitor {
 public:
  DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        show_details_(show_details) {}

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    if (show_details_) {
      LOG(INFO) << "|> pc = " << std::hex << GetCurrentQuickFramePc();
      LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
      if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
        LOG(INFO) << "|> ret = " << std::hex << GetReturnPc();
      }
    }
    if (method == nullptr) {
      // Transition, do go on, we want to unwind over bridges, all the way.
      if (show_details_) {
        LOG(INFO) << "N <transition>";
      }
      return true;
    } else if (method->IsRuntimeMethod()) {
      if (show_details_) {
        LOG(INFO) << "R " << PrettyMethod(method, true);
      }
      return true;
    } else {
      bool is_shadow = GetCurrentShadowFrame() != nullptr;
      LOG(INFO) << (is_shadow ? "S" : "Q")
                << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
                << " "
                << PrettyMethod(method, true);
      return true;  // Go on.
    }
  }

 private:
  // When true, also log raw pc/frame/return addresses for each frame.
  bool show_details_;

  DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
};
640
641void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
642 DumpFramesWithTypeStackVisitor visitor(self, details);
643 visitor.WalkStack(true);
644}
645
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100646} // namespace art