blob: 8ef5ef41fc2e04102917b071afa7fb039d0190f5 [file] [log] [blame]
jeffhao725a9572012-11-13 18:20:12 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
Alex Lightb7c640d2019-03-20 15:52:13 -070019#include <functional>
20#include <optional>
Ian Rogersc7dd2952014-10-21 23:31:19 -070021#include <sstream>
22
Andreas Gampec7d878d2018-11-19 18:42:06 +000023#include <android-base/logging.h>
24
Ian Rogerse63db272014-07-15 15:36:11 -070025#include "arch/context.h"
Alex Lightd7661582017-05-01 13:48:16 -070026#include "art_field-inl.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070027#include "art_method-inl.h"
David Sehrc431b9d2018-03-02 12:01:51 -080028#include "base/atomic.h"
Andreas Gampe8228cdf2017-05-30 15:03:54 -070029#include "base/callee_save_type.h"
jeffhao725a9572012-11-13 18:20:12 -080030#include "class_linker.h"
31#include "debugger.h"
David Sehr9e734c72018-01-04 17:56:19 -080032#include "dex/dex_file-inl.h"
33#include "dex/dex_file_types.h"
34#include "dex/dex_instruction-inl.h"
Mathieu Chartierd8891782014-03-02 13:28:37 -080035#include "entrypoints/quick/quick_alloc_entrypoints.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070036#include "entrypoints/quick/quick_entrypoints.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070037#include "entrypoints/runtime_asm_entrypoints.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070038#include "gc_root-inl.h"
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010039#include "interpreter/interpreter.h"
Mingyao Yang2ee17902017-08-30 11:37:08 -070040#include "interpreter/interpreter_common.h"
Mathieu Chartiere5f13e52015-02-24 09:37:21 -080041#include "jit/jit.h"
42#include "jit/jit_code_cache.h"
Alex Lightd7661582017-05-01 13:48:16 -070043#include "jvalue-inl.h"
Alex Lightb7c640d2019-03-20 15:52:13 -070044#include "jvalue.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080045#include "mirror/class-inl.h"
46#include "mirror/dex_cache.h"
Ian Rogers4f6ad8a2013-03-18 15:27:28 -070047#include "mirror/object-inl.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070048#include "mirror/object_array-inl.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080049#include "nth_caller_visitor.h"
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010050#include "oat_quick_method_header.h"
David Srbecky28f6cff2018-10-16 15:07:28 +010051#include "runtime-inl.h"
jeffhao725a9572012-11-13 18:20:12 -080052#include "thread.h"
53#include "thread_list.h"
jeffhao725a9572012-11-13 18:20:12 -080054
55namespace art {
Ian Rogers62d6c772013-02-27 08:32:07 -080056namespace instrumentation {
jeffhao725a9572012-11-13 18:20:12 -080057
// When true, the instrumentation install/remove paths emit verbose logging
// (frame pushes/pops, stub installs). Compile-time constant; off by default.
constexpr bool kVerboseInstrumentation = false;
Sebastien Hertz5bfd5c92013-11-15 11:36:07 +010059
Alex Lightb7c640d2019-03-20 15:52:13 -070060void InstrumentationListener::MethodExited(
61 Thread* thread,
Alex Lightb7c640d2019-03-20 15:52:13 -070062 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -070063 OptionalFrame frame,
64 MutableHandle<mirror::Object>& return_value) {
Alex Lightd7661582017-05-01 13:48:16 -070065 DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
66 Primitive::kPrimNot);
Alex Lightb7c640d2019-03-20 15:52:13 -070067 const void* original_ret = return_value.Get();
Alex Lightd7661582017-05-01 13:48:16 -070068 JValue v;
69 v.SetL(return_value.Get());
Mythri Alle18fba4c2021-10-27 10:00:55 +000070 MethodExited(thread, method, frame, v);
Alex Lightb7c640d2019-03-20 15:52:13 -070071 DCHECK(original_ret == v.GetL()) << "Return value changed";
Alex Lightd7661582017-05-01 13:48:16 -070072}
73
74void InstrumentationListener::FieldWritten(Thread* thread,
75 Handle<mirror::Object> this_object,
76 ArtMethod* method,
77 uint32_t dex_pc,
78 ArtField* field,
79 Handle<mirror::Object> field_value) {
80 DCHECK(!field->IsPrimitiveType());
81 JValue v;
82 v.SetL(field_value.Get());
83 FieldWritten(thread, this_object, method, dex_pc, field, v);
84}
85
// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames. Inlined frames share their caller's physical frame and
// return PC, so stack walks performed for instrumentation skip them.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;
90
Mathieu Chartiere0671ce2015-07-28 17:23:28 -070091class InstallStubsClassVisitor : public ClassVisitor {
92 public:
93 explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
94 : instrumentation_(instrumentation) {}
95
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010096 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -070097 instrumentation_->InstallStubsForClass(klass.Ptr());
Mathieu Chartiere0671ce2015-07-28 17:23:28 -070098 return true; // we visit all classes.
99 }
100
101 private:
102 Instrumentation* const instrumentation_;
103};
104
// Scoped helper for popping instrumentation frames during exception delivery.
// pop_until_ starts at 0, i.e. no frames are popped unless PopFramesTo runs.
InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
    : self_(self),
      instrumentation_(Runtime::Current()->GetInstrumentation()),
      pop_until_(0u) {}
Alex Light2c8206f2018-06-08 14:51:09 -0700109
110InstrumentationStackPopper::~InstrumentationStackPopper() {
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000111 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
112 self_->GetInstrumentationStack();
113 for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until_;) {
114 i = stack->erase(i);
Alex Light2c8206f2018-06-08 14:51:09 -0700115 }
116}
117
// Walks instrumentation frames up to `stack_pointer`, delivering method-unwind
// events for the exception in `exception`. Records in pop_until_ how far the
// destructor may later erase. Returns true if all requested frames were
// processed; returns false if a listener threw a NEW exception, in which case
// `exception` is updated to the new throwable and only a partial pop happened.
bool InstrumentationStackPopper::PopFramesTo(uintptr_t stack_pointer,
                                             MutableHandle<mirror::Throwable>& exception) {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  DCHECK(!self_->IsExceptionPending());
  // Fast path: nobody listens for unwinds, so just remember how far to pop.
  if (!instrumentation_->HasMethodUnwindListeners()) {
    pop_until_ = stack_pointer;
    return true;
  }
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Popping frames for exception " << exception->Dump();
  }
  // The instrumentation events expect the exception to be set.
  self_->SetException(exception.Get());
  bool new_exception_thrown = false;
  auto i = stack->upper_bound(pop_until_);

  // Now pop all frames until reaching stack_pointer, or a new exception is
  // thrown. Note that `stack_pointer` doesn't need to be a return PC address
  // (in fact the exception handling code passes the start of the frame where
  // the catch handler is).
  for (; i != stack->end() && i->first <= stack_pointer; i++) {
    const InstrumentationStackFrame& frame = i->second;
    ArtMethod* method = frame.method_;
    // Notify listeners of method unwind.
    // TODO: improve the dex_pc information here.
    uint32_t dex_pc = dex::kDexNoIndex;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
    }
    // Runtime methods and interpreter-entry trampolines are bookkeeping
    // frames, not user code: no unwind event for them.
    if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
      instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
      // A listener may have thrown a different exception; detect by identity.
      new_exception_thrown = self_->GetException() != exception.Get();
      if (new_exception_thrown) {
        pop_until_ = i->first;
        break;
      }
    }
  }
  if (!new_exception_thrown) {
    pop_until_ = stack_pointer;
  }
  // Hand the (possibly new) pending exception back to the caller and clear it
  // so the runtime is in a consistent state.
  exception.Assign(self_->GetException());
  self_->ClearException();
  if (kVerboseInstrumentation && new_exception_thrown) {
    LOG(INFO) << "Did partial pop of frames due to new exception";
  }
  return !new_exception_thrown;
}
Ian Rogers62d6c772013-02-27 08:32:07 -0800167
// Default-constructs the instrumentation in the "nothing instrumented" state:
// no listeners registered, no stubs installed, deoptimization disabled.
Instrumentation::Instrumentation()
    : current_force_deopt_id_(0),
      instrumentation_stubs_installed_(false),
      instrumentation_level_(InstrumentationLevel::kInstrumentNothing),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_exception_handled_listeners_(false),
      // Bottom lock: may be acquired while holding most other runtime locks.
      deoptimized_methods_lock_(new ReaderWriterMutex("deoptimized methods lock",
                                                      kGenericBottomLock)),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false),
      can_use_instrumentation_trampolines_(true) {
}
191
Vladimir Marko19711d42019-04-12 14:05:34 +0100192void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
Vladimir Marko72ab6842017-01-20 19:32:50 +0000193 if (!klass->IsResolved()) {
Sebastien Hertza8a697f2015-01-15 12:28:47 +0100194 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
195 // could not be initialized or linked with regards to class inheritance.
Vladimir Marko72ab6842017-01-20 19:32:50 +0000196 } else if (klass->IsErroneousResolved()) {
197 // We can't execute code in a erroneous class: do nothing.
Sebastien Hertza8a697f2015-01-15 12:28:47 +0100198 } else {
Andreas Gampe542451c2016-07-26 09:02:02 -0700199 for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
Alex Light51a64d52015-12-17 13:55:59 -0800200 InstallStubsForMethod(&method);
Sebastien Hertza8a697f2015-01-15 12:28:47 +0100201 }
jeffhao725a9572012-11-13 18:20:12 -0800202 }
jeffhao725a9572012-11-13 18:20:12 -0800203}
204
Mathieu Chartiere401d142015-04-22 13:56:20 -0700205static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700206 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffrayd5a95872019-08-12 13:24:07 +0100207 if (kIsDebugBuild) {
208 jit::Jit* jit = Runtime::Current()->GetJit();
209 if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
210 // Ensure we always have the thumb entrypoint for JIT on arm32.
211 if (kRuntimeISA == InstructionSet::kArm) {
212 CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
213 }
214 }
215 }
Ian Rogersef7d42f2014-01-06 12:55:46 -0800216 method->SetEntryPointFromQuickCompiledCode(quick_code);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100217}
218
Alex Light0fa17862017-10-24 13:43:05 -0700219bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
220 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lightf2858632018-04-02 11:28:50 -0700221 art::Runtime* runtime = Runtime::Current();
222 // If anything says we need the debug version or we are debuggable we will need the debug version
223 // of the method.
224 return (runtime->GetRuntimeCallbacks()->MethodNeedsDebugVersion(method) ||
225 runtime->IsJavaDebuggable()) &&
Mingyao Yang6ea1a0e2016-01-29 12:12:49 -0800226 !method->IsNative() &&
Alex Lightf2858632018-04-02 11:28:50 -0700227 !method->IsProxyMethod();
Mingyao Yang6ea1a0e2016-01-29 12:12:49 -0800228}
229
Mythri Alle5097f832021-11-02 14:52:30 +0000230bool Instrumentation::CodeNeedsEntryExitStub(const void* code, ArtMethod* method) {
231 // In some tests runtime isn't setup fully and hence the entry points could
232 // be nullptr.
233 if (code == nullptr) {
234 return true;
235 }
236
Mythri Alle519ff8b2021-11-17 13:47:07 +0000237 // When jiting code for debuggable apps we generate the code to call method
238 // entry / exit hooks when required. Hence it is not required to update
239 // to instrumentation entry point for JITed code in debuggable mode.
240 if (!Runtime::Current()->IsJavaDebuggable()) {
241 return true;
242 }
243
Mythri Alle5097f832021-11-02 14:52:30 +0000244 // Native functions can have JITed entry points but we don't include support
245 // for calling entry / exit hooks directly from the JITed code for native
246 // functions. So we still have to install entry exit stubs for such cases.
247 if (method->IsNative()) {
248 return true;
249 }
250
251 jit::Jit* jit = Runtime::Current()->GetJit();
252 if (jit != nullptr && jit->GetCodeCache()->ContainsPc(code)) {
253 return false;
254 }
255 return true;
256}
257
// Chooses and installs the correct quick-code entrypoint for `method` given
// the current instrumentation level: the interpreter bridge (forced interpret
// or deoptimized methods), the instrumentation entry stub (entry/exit events),
// the resolution stub (uninitialized classes), or the method's real code.
void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  // TODO We should remove the need for this since it means we cannot always correctly detect calls
  // to Proxy.<init>
  // Annoyingly this can be called before we have actually initialized WellKnownClasses so therefore
  // we also need to check this based on the declaring-class descriptor. The check is valid because
  // Proxy only has a single constructor.
  ArtMethod* well_known_proxy_init = jni::DecodeArtMethod(
      WellKnownClasses::java_lang_reflect_Proxy_init);
  if ((LIKELY(well_known_proxy_init != nullptr) && UNLIKELY(method == well_known_proxy_init)) ||
      UNLIKELY(method->IsConstructor() &&
               method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;"))) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = (instrumentation_level_ == InstrumentationLevel::kInstrumentNothing);
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    // Restoring the uninstrumented state.
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = GetCodeForInvoke(method);
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((InterpretOnly() || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (EntryExitStubsInstalled()) {
          // This needs to be checked first since the instrumentation entrypoint will be able to
          // find the actual JIT compiled code that corresponds to this method.
          const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
          if (CodeNeedsEntryExitStub(code, method)) {
            new_quick_code = GetQuickInstrumentationEntryPoint();
          } else {
            // Debuggable JIT code calls the hooks itself; keep it installed.
            new_quick_code = code;
          }
        } else if (NeedDebugVersionFor(method)) {
          // It would be great to search the JIT for its implementation here but we cannot due to
          // the locks we hold. Instead just set to the interpreter bridge and that code will search
          // the JIT when it gets called and replace the entrypoint then.
          new_quick_code = GetQuickToInterpreterBridge();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
321
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames. When force_deopt is
// true the frames have to be deoptimized. If the frame has a deoptimization
// stack slot (all Jited frames), it is set to true to indicate this. For frames
// that do not have this slot, the force_deopt_id on the InstrumentationStack is
// used to check if the frame needs to be deoptimized. When force_deopt is false
// we just instrument the stack for method entry / exit hooks.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
// `arg` is the Instrumentation* (this is used as a thread-list callback).
void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frames)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  // Visitor that records (or re-uses) an InstrumentationStackFrame per quick
  // frame and redirects each frame's return PC to the instrumentation exit
  // stub. Shadow (interpreter) frames are collected separately so that method
  // entry events can be interleaved in correct stack order afterwards.
  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in,
                        Context* context,
                        uintptr_t instrumentation_exit_pc,
                        uint64_t force_deopt_id,
                        bool deopt_all_frames)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false),
          force_deopt_id_(force_deopt_id),
          deopt_all_frames_(deopt_all_frames) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        // Interpreter (shadow) frame: no return PC to redirect; remember it
        // for the method-entry events pass below.
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject().Ptr(),
                                                        m,
                                                        /*return_pc=*/ 0,
                                                        GetFrameId(),
                                                        interpreter_frame,
                                                        force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // This frame was instrumented by a previous pass; look up its record.
        auto it = instrumentation_stack_->find(GetReturnPcAddr());
        CHECK(it != instrumentation_stack_->end());
        const InstrumentationStackFrame& frame = it->second;
        if (m->IsRuntimeMethod()) {
          if (frame.interpreter_entry_) {
            return true;
          }
        }

        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation or be interpreter on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
            << "Expected " << ArtMethod::PrettyMethod(m)
            << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        // If it is a JITed frame then just set the deopt bit if required
        // otherwise continue
        const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
        if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
          if (deopt_all_frames_) {
            SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kDebug);
          }
          return true;
        }
        CHECK_NE(return_pc, 0U);
        if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
          // We already saw an existing instrumentation frame so this should be a runtime-method
          // inserted by the interpreter or runtime.
          std::string thread_name;
          GetThread()->GetThreadName(thread_name);
          LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
                     << " without instrumentation exit return or interpreter frame."
                     << " method is " << GetMethod()->PrettyMethod()
                     << " return_pc is " << std::hex << return_pc;
          UNREACHABLE();
        }
        if (m->IsRuntimeMethod()) {
          // Peek at the caller sitting just above this runtime frame.
          size_t frame_size = GetCurrentQuickFrameInfo().FrameSizeInBytes();
          ArtMethod** caller_frame = reinterpret_cast<ArtMethod**>(
              reinterpret_cast<uint8_t*>(GetCurrentQuickFrame()) + frame_size);
          if (*caller_frame != nullptr && (*caller_frame)->IsNative()) {
            // Do not install instrumentation exit on return to JNI stubs.
            return true;
          }
        }
        InstrumentationStackFrame instrumentation_frame(
            m->IsRuntimeMethod() ? nullptr : GetThisObject().Ptr(),
            m,
            return_pc,
            GetFrameId(),    // A runtime method still gets a frame id.
            false,
            force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Record the original return PC keyed by its slot address, then
        // redirect the slot to the instrumentation exit stub.
        instrumentation_stack_->insert({GetReturnPcAddr(), instrumentation_frame});
        SetReturnPc(instrumentation_exit_pc_);
      }
      return true;  // Continue.
    }
    std::map<uintptr_t, InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    uint64_t force_deopt_id_;
    bool deopt_all_frames_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread,
                              context.get(),
                              instrumentation_exit_pc,
                              instrumentation->current_force_deopt_id_,
                              deopt_all_frames);
  visitor.WalkStack(true);

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // TODO: This is the only place we make use of frame_id_. We should create a
    // std::vector instead and populate it as we walk the stack.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      // Emit events for shadow frames that sit below (older than) this quick
      // frame first, preserving call order.
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < isi->second.frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).method_);
        ++ssi;
      }
      if (!isi->second.interpreter_entry_ && !isi->second.method_->IsRuntimeMethod()) {
        instrumentation->MethodEnterEvent(thread, isi->second.method_);
      }
    }
  }
  thread->VerifyStack();
}
483
// Instruments `thread`'s stack for method entry/exit events; when force_deopt
// is true, frames are additionally marked for deoptimization.
void Instrumentation::InstrumentThreadStack(Thread* thread, bool force_deopt) {
  // Set the flag first so the rest of the runtime sees stubs as installed
  // before (and while) the stack is being rewritten.
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this, force_deopt);
}
488
Ian Rogers62d6c772013-02-27 08:32:07 -0800489// Removes the instrumentation exit pc as the return PC for every quick frame.
490static void InstrumentationRestoreStack(Thread* thread, void* arg)
Nicolas Geoffray5a23d2e2015-11-03 18:58:57 +0000491 REQUIRES(Locks::mutator_lock_) {
492 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
493
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100494 struct RestoreStackVisitor final : public StackVisitor {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800495 RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
Ian Rogers62d6c772013-02-27 08:32:07 -0800496 Instrumentation* instrumentation)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100497 : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
498 thread_(thread_in),
Ian Rogers62d6c772013-02-27 08:32:07 -0800499 instrumentation_exit_pc_(instrumentation_exit_pc),
500 instrumentation_(instrumentation),
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800501 instrumentation_stack_(thread_in->GetInstrumentationStack()),
Ian Rogers62d6c772013-02-27 08:32:07 -0800502 frames_removed_(0) {}
Ian Rogers306057f2012-11-26 12:45:53 -0800503
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100504 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800505 if (instrumentation_stack_->size() == 0) {
jeffhao725a9572012-11-13 18:20:12 -0800506 return false; // Stop.
507 }
Mathieu Chartiere401d142015-04-22 13:56:20 -0700508 ArtMethod* m = GetMethod();
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700509 if (GetCurrentQuickFrame() == nullptr) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800510 if (kVerboseInstrumentation) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200511 LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
David Sehr709b0702016-10-13 09:12:37 -0700512 << " Method=" << ArtMethod::PrettyMethod(m);
Ian Rogers62d6c772013-02-27 08:32:07 -0800513 }
514 return true; // Ignore shadow frames.
515 }
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700516 if (m == nullptr) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800517 if (kVerboseInstrumentation) {
518 LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
519 }
Ian Rogers306057f2012-11-26 12:45:53 -0800520 return true; // Ignore upcalls.
521 }
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000522 auto it = instrumentation_stack_->find(GetReturnPcAddr());
523 if (it != instrumentation_stack_->end()) {
524 const InstrumentationStackFrame& instrumentation_frame = it->second;
525 if (kVerboseInstrumentation) {
526 LOG(INFO) << " Removing exit stub in " << DescribeLocation();
Ian Rogers62d6c772013-02-27 08:32:07 -0800527 }
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000528 if (instrumentation_frame.interpreter_entry_) {
529 CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
530 } else {
531 CHECK_EQ(m->GetNonObsoleteMethod(),
532 instrumentation_frame.method_->GetNonObsoleteMethod())
533 << ArtMethod::PrettyMethod(m)
534 << " and " << instrumentation_frame.method_->GetNonObsoleteMethod()->PrettyMethod();
535 }
536 SetReturnPc(instrumentation_frame.return_pc_);
537 if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
538 !m->IsRuntimeMethod()) {
539 // Create the method exit events. As the methods didn't really exit the result is 0.
540 // We only do this if no debugger is attached to prevent from posting events twice.
541 JValue val;
Mythri Alle18fba4c2021-10-27 10:00:55 +0000542 instrumentation_->MethodExitEvent(thread_, m, OptionalFrame{}, val);
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000543 }
544 frames_removed_++;
545 } else {
Ian Rogers62d6c772013-02-27 08:32:07 -0800546 if (kVerboseInstrumentation) {
547 LOG(INFO) << " No exit stub in " << DescribeLocation();
Ian Rogers306057f2012-11-26 12:45:53 -0800548 }
jeffhao725a9572012-11-13 18:20:12 -0800549 }
550 return true; // Continue.
551 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800552 Thread* const thread_;
Ian Rogers306057f2012-11-26 12:45:53 -0800553 const uintptr_t instrumentation_exit_pc_;
Ian Rogers62d6c772013-02-27 08:32:07 -0800554 Instrumentation* const instrumentation_;
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000555 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
Ian Rogers62d6c772013-02-27 08:32:07 -0800556 size_t frames_removed_;
jeffhao725a9572012-11-13 18:20:12 -0800557 };
Ian Rogers62d6c772013-02-27 08:32:07 -0800558 if (kVerboseInstrumentation) {
559 std::string thread_name;
560 thread->GetThreadName(thread_name);
561 LOG(INFO) << "Removing exit stubs in " << thread_name;
562 }
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000563 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
564 thread->GetInstrumentationStack();
Ian Rogers62d6c772013-02-27 08:32:07 -0800565 if (stack->size() > 0) {
566 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700567 uintptr_t instrumentation_exit_pc =
568 reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
Ian Rogers62d6c772013-02-27 08:32:07 -0800569 RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
570 visitor.WalkStack(true);
571 CHECK_EQ(visitor.frames_removed_, stack->size());
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000572 stack->clear();
jeffhao725a9572012-11-13 18:20:12 -0800573 }
574}
575
// Marks every instrumented frame on every live thread's stack for
// deoptimization, then bumps the global force-deopt id so these frames can be
// distinguished from ones instrumented by a later request.
void Instrumentation::DeoptimizeAllThreadFrames() {
  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::thread_list_lock_);
  ThreadList* tl = Runtime::Current()->GetThreadList();
  tl->ForEach([&](Thread* t) {
    // The assertion lives inside the lambda because the thread-safety analysis
    // cannot see through ForEach; we hold the mutator lock exclusively here.
    Locks::mutator_lock_->AssertExclusiveHeld(self);
    // deopt_all_frames == true: force deoptimization of all existing frames.
    InstrumentThreadStack(t, /* deopt_all_frames= */ true);
  });
  current_force_deopt_id_++;
}
586
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200587static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
588 return (events & expected) != 0;
589}
590
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000591static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
592 uint32_t events,
593 std::list<InstrumentationListener*>& list,
594 InstrumentationListener* listener,
595 bool* has_listener)
596 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
597 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
598 if (!HasEvent(event, events)) {
599 return;
600 }
601 // If there is a free slot in the list, we insert the listener in that slot.
602 // Otherwise we add it to the end of the list.
603 auto it = std::find(list.begin(), list.end(), nullptr);
604 if (it != list.end()) {
605 *it = listener;
606 } else {
607 list.push_back(listener);
608 }
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +0100609 *has_listener = true;
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000610}
611
Ian Rogers62d6c772013-02-27 08:32:07 -0800612void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
613 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000614 PotentiallyAddListenerTo(kMethodEntered,
615 events,
616 method_entry_listeners_,
617 listener,
618 &have_method_entry_listeners_);
619 PotentiallyAddListenerTo(kMethodExited,
620 events,
621 method_exit_listeners_,
622 listener,
623 &have_method_exit_listeners_);
624 PotentiallyAddListenerTo(kMethodUnwind,
625 events,
626 method_unwind_listeners_,
627 listener,
628 &have_method_unwind_listeners_);
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000629 PotentiallyAddListenerTo(kBranch,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000630 events,
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000631 branch_listeners_,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000632 listener,
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000633 &have_branch_listeners_);
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000634 PotentiallyAddListenerTo(kDexPcMoved,
635 events,
636 dex_pc_listeners_,
637 listener,
638 &have_dex_pc_listeners_);
639 PotentiallyAddListenerTo(kFieldRead,
640 events,
641 field_read_listeners_,
642 listener,
643 &have_field_read_listeners_);
644 PotentiallyAddListenerTo(kFieldWritten,
645 events,
646 field_write_listeners_,
647 listener,
648 &have_field_write_listeners_);
Alex Light6e1607e2017-08-23 10:06:18 -0700649 PotentiallyAddListenerTo(kExceptionThrown,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000650 events,
Alex Light6e1607e2017-08-23 10:06:18 -0700651 exception_thrown_listeners_,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000652 listener,
Alex Light6e1607e2017-08-23 10:06:18 -0700653 &have_exception_thrown_listeners_);
Alex Lighte814f9d2017-07-31 16:14:39 -0700654 PotentiallyAddListenerTo(kWatchedFramePop,
655 events,
656 watched_frame_pop_listeners_,
657 listener,
658 &have_watched_frame_pop_listeners_);
Alex Light9fb1ab12017-09-05 09:32:49 -0700659 PotentiallyAddListenerTo(kExceptionHandled,
660 events,
661 exception_handled_listeners_,
662 listener,
663 &have_exception_handled_listeners_);
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200664 UpdateInterpreterHandlerTable();
jeffhao725a9572012-11-13 18:20:12 -0800665}
666
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000667static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
668 uint32_t events,
669 std::list<InstrumentationListener*>& list,
670 InstrumentationListener* listener,
671 bool* has_listener)
672 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
673 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
674 if (!HasEvent(event, events)) {
675 return;
676 }
677 auto it = std::find(list.begin(), list.end(), listener);
678 if (it != list.end()) {
679 // Just update the entry, do not remove from the list. Removing entries in the list
680 // is unsafe when mutators are iterating over it.
681 *it = nullptr;
682 }
683
684 // Check if the list contains any non-null listener, and update 'has_listener'.
685 for (InstrumentationListener* l : list) {
686 if (l != nullptr) {
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +0100687 *has_listener = true;
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000688 return;
689 }
690 }
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +0100691 *has_listener = false;
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000692}
693
Ian Rogers62d6c772013-02-27 08:32:07 -0800694void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
695 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000696 PotentiallyRemoveListenerFrom(kMethodEntered,
697 events,
698 method_entry_listeners_,
699 listener,
700 &have_method_entry_listeners_);
701 PotentiallyRemoveListenerFrom(kMethodExited,
702 events,
703 method_exit_listeners_,
704 listener,
705 &have_method_exit_listeners_);
706 PotentiallyRemoveListenerFrom(kMethodUnwind,
707 events,
708 method_unwind_listeners_,
709 listener,
710 &have_method_unwind_listeners_);
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000711 PotentiallyRemoveListenerFrom(kBranch,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000712 events,
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000713 branch_listeners_,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000714 listener,
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000715 &have_branch_listeners_);
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000716 PotentiallyRemoveListenerFrom(kDexPcMoved,
717 events,
718 dex_pc_listeners_,
719 listener,
720 &have_dex_pc_listeners_);
721 PotentiallyRemoveListenerFrom(kFieldRead,
722 events,
723 field_read_listeners_,
724 listener,
725 &have_field_read_listeners_);
726 PotentiallyRemoveListenerFrom(kFieldWritten,
727 events,
728 field_write_listeners_,
729 listener,
730 &have_field_write_listeners_);
Alex Light6e1607e2017-08-23 10:06:18 -0700731 PotentiallyRemoveListenerFrom(kExceptionThrown,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000732 events,
Alex Light6e1607e2017-08-23 10:06:18 -0700733 exception_thrown_listeners_,
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000734 listener,
Alex Light6e1607e2017-08-23 10:06:18 -0700735 &have_exception_thrown_listeners_);
Alex Lighte814f9d2017-07-31 16:14:39 -0700736 PotentiallyRemoveListenerFrom(kWatchedFramePop,
737 events,
738 watched_frame_pop_listeners_,
739 listener,
740 &have_watched_frame_pop_listeners_);
Alex Light9fb1ab12017-09-05 09:32:49 -0700741 PotentiallyRemoveListenerFrom(kExceptionHandled,
742 events,
743 exception_handled_listeners_,
744 listener,
745 &have_exception_handled_listeners_);
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200746 UpdateInterpreterHandlerTable();
jeffhao725a9572012-11-13 18:20:12 -0800747}
748
// Returns the instrumentation level currently in effect.
Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  return instrumentation_level_;
}
752
Alex Lightdba61482016-12-21 08:20:29 -0800753bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
Alex Light4ba388a2017-01-27 10:26:49 -0800754 // We need to reinstall instrumentation if we go to a different level.
755 return GetCurrentInstrumentationLevel() != new_level;
Alex Lightdba61482016-12-21 08:20:29 -0800756}
757
Alex Light40607862019-05-06 18:16:24 +0000758void Instrumentation::UpdateInstrumentationLevels(InstrumentationLevel level) {
759 if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
760 can_use_instrumentation_trampolines_ = false;
761 }
762 if (UNLIKELY(!can_use_instrumentation_trampolines_)) {
763 for (auto& p : requested_instrumentation_levels_) {
764 if (p.second == InstrumentationLevel::kInstrumentWithInstrumentationStubs) {
765 p.second = InstrumentationLevel::kInstrumentWithInterpreter;
766 }
767 }
768 }
769}
770
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200771void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
772 // Store the instrumentation level for this key or remove it.
773 if (desired_level == InstrumentationLevel::kInstrumentNothing) {
774 // The client no longer needs instrumentation.
775 requested_instrumentation_levels_.erase(key);
776 } else {
777 // The client needs instrumentation.
778 requested_instrumentation_levels_.Overwrite(key, desired_level);
779 }
780
Alex Light40607862019-05-06 18:16:24 +0000781 UpdateInstrumentationLevels(desired_level);
782 UpdateStubs();
783}
784
// Prepares the runtime for single-thread deoptimization.
void Instrumentation::EnableSingleThreadDeopt() {
  // Single-thread deopt only uses interpreter.
  can_use_instrumentation_trampolines_ = false;
  // Promote any stub-level requests to interpreter level, then reinstall.
  UpdateInstrumentationLevels(InstrumentationLevel::kInstrumentWithInterpreter);
  UpdateStubs();
}
791
// Records `requested_level` as the level now in effect. Callers (see
// UpdateStubs) perform the actual stub (un)installation.
void Instrumentation::UpdateInstrumentationLevel(InstrumentationLevel requested_level) {
  instrumentation_level_ = requested_level;
}
795
// Recomputes the highest instrumentation level requested by any client and
// installs or removes method stubs accordingly. Requires the mutator lock to
// be exclusively held and the thread list lock to not be held.
void Instrumentation::UpdateStubs() {
  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  DCHECK(can_use_instrumentation_trampolines_ ||
         requested_level != InstrumentationLevel::kInstrumentWithInstrumentationStubs)
      << "Use trampolines: " << can_use_instrumentation_trampolines_ << " level "
      << requested_level;

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  UpdateInstrumentationLevel(requested_level);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    // Instrumentation requested: install stubs on all classes and instrument
    // every thread's existing stack frames.
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
      InstrumentThreadStack(thread, /* deopt_all_frames= */ false);
    }
  } else {
    // No instrumentation requested: restore original entrypoints.
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock());
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      bool no_remaining_deopts = true;
      // Check that there are no other forced deoptimizations. Do it here so we only need to lock
      // thread_list_lock once.
      // The compiler gets confused on the thread annotations, so use
      // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
      // exclusively at this point.
      Locks::mutator_lock_->AssertExclusiveHeld(self);
      runtime->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
        no_remaining_deopts =
            no_remaining_deopts && !t->IsForceInterpreter() &&
            std::all_of(t->GetInstrumentationStack()->cbegin(),
                        t->GetInstrumentationStack()->cend(),
                        [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
                          return frame.second.force_deopt_id_ == current_force_deopt_id_;
                        });
      });
      if (no_remaining_deopts) {
        Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
        // Only do this after restoring, as walking the stack when restoring will see
        // the instrumentation exit pc.
        instrumentation_stubs_installed_ = false;
      }
    }
  }
}
861
// ThreadList::ForEach callback: resets the quick alloc entrypoints of one
// thread. `arg` is unused; the signature is dictated by ForEach.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread();
}
865
// Switches the quick allocation entrypoints between their instrumented and
// uninstrumented variants for all threads.
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // Suspending all threads below must not happen with the mutator lock held.
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    // Runtime is up: suspend everything so the entrypoint swap cannot race
    // with running threads.
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    // update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    // constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}
892
// Public entry point: acquires the instrument-entrypoints lock and enables
// the instrumented allocation entrypoints (counted; see the Locked variant).
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}
897
// Public entry point: acquires the instrument-entrypoints lock and releases
// one reference to the instrumented allocation entrypoints.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}
902
// Counted enable of the instrumented allocation entrypoints: only the first
// request (counter 0 -> 1) actually swaps the entrypoints.
void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}
910
// Counted disable of the instrumented allocation entrypoints: only the last
// release (counter 1 -> 0) restores the uninstrumented entrypoints.
void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}
919
// Re-initializes the quick allocation entrypoints of every live thread.
// No-op before the runtime is started (threads set up entrypoints on start).
void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}
927
// Computes and installs the entrypoint `method` should actually use, given
// `quick_code` as the preferred implementation and the current
// instrumentation state.
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation stubs installed, use the code as-is.
    new_quick_code = quick_code;
  } else {
    if ((InterpreterStubsInstalled() || IsDeoptimized(method)) && !method->IsNative()) {
      // The interpreter is required, either globally or because this
      // particular method is deoptimized; native methods cannot interpret.
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        // Resolution / interpreter-bridge stubs are installed unchanged.
        new_quick_code = quick_code;
      } else if (EntryExitStubsInstalled() &&
                 // We need to make sure not to replace anything that InstallStubsForMethod
                 // wouldn't. Specifically we cannot stub out Proxy.<init> since subtypes copy the
                 // implementation directly and this will confuse the instrumentation trampolines.
                 // TODO We should remove the need for this since it makes it impossible to profile
                 // Proxy.<init> correctly in all cases.
                 method != jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init) &&
                 CodeNeedsEntryExitStub(quick_code, method)) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
956
Nicolas Geoffraya6e0e7d2018-01-26 13:16:50 +0000957void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code) {
958 // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
959 // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
960 // the ArtMethod is still in memory.
961 const void* new_quick_code = quick_code;
Mythri Alle9575c122021-11-12 12:04:41 +0000962 if (UNLIKELY(instrumentation_stubs_installed_) && EntryExitStubsInstalled()) {
Nicolas Geoffraya6e0e7d2018-01-26 13:16:50 +0000963 new_quick_code = GetQuickInstrumentationEntryPoint();
964 }
965 UpdateEntrypoints(method, new_quick_code);
966}
967
// Updates `method`'s entrypoint to `quick_code` (adjusted for instrumentation
// state). The declaring class must already be resolved.
void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}
972
// Points `method` at the quick-to-interpreter bridge so it executes in the
// interpreter.
void Instrumentation::UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method) {
  UpdateMethodsCodeImpl(method, GetQuickToInterpreterBridge());
}
976
// Variant of UpdateMethodsCode used when making the runtime Java debuggable.
void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
                                                         const void* quick_code) {
  // When the runtime is set to Java debuggable, we may update the entry points of
  // all methods of a class to the interpreter bridge. A method's declaring class
  // might not be in resolved state yet in that case, so we bypass the DCHECK in
  // UpdateMethodsCode.
  UpdateMethodsCodeImpl(method, quick_code);
}
985
Mathieu Chartiere401d142015-04-22 13:56:20 -0700986bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
987 if (IsDeoptimizedMethod(method)) {
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700988 // Already in the map. Return.
989 return false;
990 }
991 // Not found. Add it.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700992 deoptimized_methods_.insert(method);
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700993 return true;
994}
995
Mathieu Chartiere401d142015-04-22 13:56:20 -0700996bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
997 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700998}
999
Mathieu Chartiere401d142015-04-22 13:56:20 -07001000ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
1001 if (deoptimized_methods_.empty()) {
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001002 // Empty.
1003 return nullptr;
1004 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001005 return *deoptimized_methods_.begin();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001006}
1007
Mathieu Chartiere401d142015-04-22 13:56:20 -07001008bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
1009 auto it = deoptimized_methods_.find(method);
1010 if (it == deoptimized_methods_.end()) {
1011 return false;
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001012 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001013 deoptimized_methods_.erase(it);
1014 return true;
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001015}
1016
// Returns whether no method is currently individually deoptimized.
// NOTE(review): call sites take GetDeoptimizedMethodsLock() around this —
// callers are expected to hold it.
bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}
1020
// Forces `method` to be individually deoptimized: records it in the
// deoptimized set and, unless interpreter stubs are installed globally,
// points its entrypoint at the instrumentation entry stub and instruments
// every thread's existing stack frames. CHECK-fails if already deoptimized.
void Instrumentation::Deoptimize(ArtMethod* method) {
  // Only regular, invokable bytecode methods can be individually deoptimized.
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
                                    << " is already deoptimized";
  }
  if (!InterpreterStubsInstalled()) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
      // This isn't a strong deopt. We deopt this method if it is still in the
      // deopt methods list. If by the time we hit this frame we no longer need
      // a deopt it is safe to continue. So we don't mark the frame.
      InstrumentThreadStack(thread, /* deopt_all_frames= */ false);
    }
  }
}
1048
// Reverts Deoptimize() for `method`: removes it from the deoptimized set and,
// when interpreter stubs are not installed globally, restores an appropriate
// entrypoint. When no deoptimized method remains (and no entry/exit stubs are
// needed), every thread's stack is restored. CHECK-fails if not deoptimized.
void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
                            << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!InterpreterStubsInstalled()) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Static method in an uninitialized class: go back through the
      // resolution stub so class initialization is still triggered.
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = NeedDebugVersionFor(method)
          ? GetQuickToInterpreterBridge()
          : class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty && !EntryExitStubsInstalled()) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
1086
// Returns whether `method` is currently individually deoptimized, taking the
// deoptimized-methods lock for the caller.
bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  return IsDeoptimizedMethod(method);
}
1092
// Enables deoptimization support. Requires that no method is currently
// deoptimized and that deoptimization was previously disabled.
void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
1099
// Disables deoptimization support for client `key`: undoes full-world
// deoptimization if it is active, then undeoptimizes every individually
// deoptimized method, one at a time.
void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  InstrumentationLevel level = GetCurrentInstrumentationLevel();
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimized selected methods.
  while (true) {
    ArtMethod* method;
    {
      // Peek at one deoptimized method while holding the lock; the lock must
      // be released before calling Undeoptimize(), which takes it itself.
      ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
1122
Sebastien Hertz11d40c22014-02-19 18:00:17 +01001123// Indicates if instrumentation should notify method enter/exit events to the listeners.
1124bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001125 if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
1126 return false;
1127 }
Mythri Alle9575c122021-11-12 12:04:41 +00001128 return !deoptimization_enabled_ && !InterpreterStubsInstalled();
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001129}
1130
// Forces every method to execute under the interpreter, registered under `key`.
// Requires deoptimization support to be enabled (see EnableDeoptimization()).
void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}
1135
// Undoes DeoptimizeEverything() for `key`. Requires that interpreter stubs are
// currently installed and deoptimization support is enabled.
void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(InterpreterStubsInstalled());
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
1141
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001142void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
1143 InstrumentationLevel level;
1144 if (needs_interpreter) {
1145 level = InstrumentationLevel::kInstrumentWithInterpreter;
1146 } else {
1147 level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
1148 }
1149 ConfigureStubs(key, level);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001150}
1151
// Removes the method-tracing instrumentation registered under `key`.
void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
1155
// Returns the code an invoke of `method` should jump to when dispatched from the
// instrumentation entrypoint. Prefers the method's own entrypoint; falls back to the
// class linker (oat code) or the interpreter bridge when that is not usable.
const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) const {
  // This is called by instrumentation entry only and that should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (LIKELY(!InterpreterStubsInstalled())) {
    // In general we just return whatever the method thinks its entrypoint is here. The only
    // exception is if it still has the instrumentation entrypoint. That means we are racing another
    // thread getting rid of instrumentation which is unexpected but possible. In that case we want
    // to wait and try to get it from the oat file or jit.
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
    DCHECK(code != nullptr);
    if (code != GetQuickInstrumentationEntryPoint()) {
      return code;
    }
    // We don't know what it is. Fall through to try to find the code from the JIT or Oat file.
  }

  if (method->IsNative()) {
    // TODO We could have JIT compiled native entrypoints. It might be worth it to find these.
    return class_linker->GetQuickOatCodeFor(method);
  } else if (!NeedDebugVersionFor(method) && !InterpreterStubsInstalled()) {
    // Compiled code is acceptable: no debug version needed and not fully interpreted.
    return class_linker->GetQuickOatCodeFor(method);
  } else {
    return GetQuickToInterpreterBridge();
  }
}
1182
Andreas Gampe542451c2016-07-26 09:02:02 -07001183const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01001184 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Ian Rogers62d6c772013-02-27 08:32:07 -08001185 if (LIKELY(!instrumentation_stubs_installed_)) {
Mathieu Chartiera7dd0382014-11-20 17:08:58 -08001186 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
Vladimir Marko8a630572014-04-09 18:45:35 +01001187 DCHECK(code != nullptr);
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001188 if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
1189 !class_linker->IsQuickToInterpreterBridge(code)) &&
1190 !class_linker->IsQuickResolutionStub(code) &&
1191 !class_linker->IsQuickToInterpreterBridge(code)) {
Ian Rogers62d6c772013-02-27 08:32:07 -08001192 return code;
1193 }
1194 }
Alex Lightfc49fec2018-01-16 22:28:36 +00001195 return class_linker->GetQuickOatCodeFor(method);
jeffhao725a9572012-11-13 18:20:12 -08001196}
1197
Mythri Alle9cc65df2021-09-21 15:09:58 +00001198void Instrumentation::MethodEnterEventImpl(Thread* thread, ArtMethod* method) const {
Mingyao Yang2ee17902017-08-30 11:37:08 -07001199 DCHECK(!method->IsRuntimeMethod());
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001200 if (HasMethodEntryListeners()) {
1201 for (InstrumentationListener* listener : method_entry_listeners_) {
1202 if (listener != nullptr) {
Mythri Alle9cc65df2021-09-21 15:09:58 +00001203 listener->MethodEntered(thread, method);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001204 }
1205 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001206 }
1207}
1208
Alex Lightb7c640d2019-03-20 15:52:13 -07001209template <>
Alex Lightd7661582017-05-01 13:48:16 -07001210void Instrumentation::MethodExitEventImpl(Thread* thread,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001211 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -07001212 OptionalFrame frame,
1213 MutableHandle<mirror::Object>& return_value) const {
1214 if (HasMethodExitListeners()) {
Alex Lightb7c640d2019-03-20 15:52:13 -07001215 for (InstrumentationListener* listener : method_exit_listeners_) {
1216 if (listener != nullptr) {
Mythri Alle18fba4c2021-10-27 10:00:55 +00001217 listener->MethodExited(thread, method, frame, return_value);
Alex Lightb7c640d2019-03-20 15:52:13 -07001218 }
1219 }
1220 }
1221}
1222
// Specialization for JValue return values. If the method actually returns a
// reference, the value is re-dispatched through the MutableHandle<mirror::Object>
// specialization so listeners see it via a handle, and the (possibly updated)
// reference is written back into `return_value` afterwards.
template<> void Instrumentation::MethodExitEventImpl(Thread* thread,
                                                     ArtMethod* method,
                                                     OptionalFrame frame,
                                                     JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    // One handle slot, used only on the reference-return branch below.
    StackHandleScope<1> hs(self);
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
        Primitive::kPrimNot) {
      // Primitive (or void) return: pass the raw JValue straight through.
      for (InstrumentationListener* listener : method_exit_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
    } else {
      // Reference return: wrap in a handle, notify, then write the handle's
      // current value back into the JValue.
      MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      MethodExitEventImpl(thread, method, frame, ret);
      return_value.SetL(ret.Get());
    }
  }
}
1244
Alex Lightd7661582017-05-01 13:48:16 -07001245void Instrumentation::MethodUnwindEvent(Thread* thread,
Vladimir Marko19711d42019-04-12 14:05:34 +01001246 ObjPtr<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001247 ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -08001248 uint32_t dex_pc) const {
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001249 if (HasMethodUnwindListeners()) {
Alex Lightd7661582017-05-01 13:48:16 -07001250 Thread* self = Thread::Current();
1251 StackHandleScope<1> hs(self);
1252 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Mathieu Chartier02e25112013-08-14 16:14:24 -07001253 for (InstrumentationListener* listener : method_unwind_listeners_) {
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001254 if (listener != nullptr) {
Alex Lightd7661582017-05-01 13:48:16 -07001255 listener->MethodUnwind(thread, thiz, method, dex_pc);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001256 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001257 }
1258 }
1259}
1260
Alex Lightd7661582017-05-01 13:48:16 -07001261void Instrumentation::DexPcMovedEventImpl(Thread* thread,
1262 ObjPtr<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001263 ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -08001264 uint32_t dex_pc) const {
Alex Lightd7661582017-05-01 13:48:16 -07001265 Thread* self = Thread::Current();
1266 StackHandleScope<1> hs(self);
1267 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001268 for (InstrumentationListener* listener : dex_pc_listeners_) {
1269 if (listener != nullptr) {
Alex Lightd7661582017-05-01 13:48:16 -07001270 listener->DexPcMoved(thread, thiz, method, dex_pc);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001271 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001272 }
1273}
1274
Nicolas Geoffray81f0f952016-01-20 16:25:19 +00001275void Instrumentation::BranchImpl(Thread* thread,
1276 ArtMethod* method,
1277 uint32_t dex_pc,
1278 int32_t offset) const {
1279 for (InstrumentationListener* listener : branch_listeners_) {
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001280 if (listener != nullptr) {
Nicolas Geoffray81f0f952016-01-20 16:25:19 +00001281 listener->Branch(thread, method, dex_pc, offset);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001282 }
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08001283 }
1284}
1285
Alex Lighte814f9d2017-07-31 16:14:39 -07001286void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
1287 for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
1288 if (listener != nullptr) {
1289 listener->WatchedFramePop(thread, frame);
1290 }
1291 }
1292}
1293
Alex Lightd7661582017-05-01 13:48:16 -07001294void Instrumentation::FieldReadEventImpl(Thread* thread,
1295 ObjPtr<mirror::Object> this_object,
1296 ArtMethod* method,
1297 uint32_t dex_pc,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001298 ArtField* field) const {
Alex Lightd7661582017-05-01 13:48:16 -07001299 Thread* self = Thread::Current();
1300 StackHandleScope<1> hs(self);
1301 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001302 for (InstrumentationListener* listener : field_read_listeners_) {
1303 if (listener != nullptr) {
Alex Lightd7661582017-05-01 13:48:16 -07001304 listener->FieldRead(thread, thiz, method, dex_pc, field);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001305 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +02001306 }
1307}
1308
// Dispatches a field-write event for `field` written by `method` at `dex_pc`.
// Primitive values are forwarded as a raw JValue; reference values are wrapped
// in a second handle before notifying listeners.
void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  // Two handle slots: one for the receiver, one (conditionally) for the new value.
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    // Reference value: pass it through a handle.
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}
1333
// Dispatches an exception-thrown event for `exception_object`. The thread's
// pending exception is cleared while listeners run and restored afterwards.
void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    // The event is expected to be raised for the thread's current exception.
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // Listeners must not leave a new exception pending; the original one is
    // re-installed below. See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}
1352
Alex Light9fb1ab12017-09-05 09:32:49 -07001353void Instrumentation::ExceptionHandledEvent(Thread* thread,
Vladimir Marko19711d42019-04-12 14:05:34 +01001354 ObjPtr<mirror::Throwable> exception_object) const {
Alex Light9fb1ab12017-09-05 09:32:49 -07001355 Thread* self = Thread::Current();
1356 StackHandleScope<1> hs(self);
1357 Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
1358 if (HasExceptionHandledListeners()) {
1359 // We should have cleared the exception so that callers can detect a new one.
1360 DCHECK(thread->GetException() == nullptr);
1361 for (InstrumentationListener* listener : exception_handled_listeners_) {
1362 if (listener != nullptr) {
1363 listener->ExceptionHandled(thread, h_exception);
1364 }
1365 }
1366 }
1367}
1368
// Records an instrumentation frame (keyed by `stack_ptr`) on the thread's
// instrumentation stack so the return at `lr` can later be intercepted.
// Sends the method-enter event first; if a listener throws, only the unwind
// event is sent and no frame is pushed.
void Instrumentation::PushInstrumentationStackFrame(Thread* self,
                                                    ObjPtr<mirror::Object> this_object,
                                                    ArtMethod* method,
                                                    uintptr_t stack_ptr,
                                                    uintptr_t lr,
                                                    bool interpreter_entry) {
  DCHECK(!self->IsExceptionPending());
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
              << reinterpret_cast<void*>(lr);
  }

  // We send the enter event before pushing the instrumentation frame to make cleanup easier. If the
  // event causes an exception we can simply send the unwind event and return.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_this(hs.NewHandle(this_object));
  if (!interpreter_entry) {
    MethodEnterEvent(self, method);
    if (self->IsExceptionPending()) {
      MethodUnwindEvent(self, h_this.Get(), method, 0);
      return;
    }
  }

  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  DCHECK(!self->IsExceptionPending());
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);

  // The frame remembers current_force_deopt_id_ so that a later forced
  // deoptimization of all threads can be detected when the frame is popped.
  instrumentation::InstrumentationStackFrame instrumentation_frame(
      h_this.Get(), method, lr, frame_id, interpreter_entry, current_force_deopt_id_);
  stack->insert({stack_ptr, instrumentation_frame});
}
1403
Mingyao Yang2ee17902017-08-30 11:37:08 -07001404DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
1405 if (method->IsRuntimeMethod()) {
1406 // Certain methods have strict requirement on whether the dex instruction
1407 // should be re-executed upon deoptimization.
1408 if (method == Runtime::Current()->GetCalleeSaveMethod(
1409 CalleeSaveType::kSaveEverythingForClinit)) {
1410 return DeoptimizationMethodType::kKeepDexPc;
1411 }
1412 if (method == Runtime::Current()->GetCalleeSaveMethod(
1413 CalleeSaveType::kSaveEverythingForSuspendCheck)) {
1414 return DeoptimizationMethodType::kKeepDexPc;
1415 }
1416 }
1417 return DeoptimizationMethodType::kDefault;
1418}
1419
// Try to get the shorty of a runtime method if it's an invocation stub.
// Walks the stack to the first Java frame and derives the return-type shorty
// character from it; returns 'V' if no suitable frame is found.
static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
  char shorty = 'V';
  StackVisitor::WalkStack(
      [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* m = stack_visitor->GetMethod();
        if (m == nullptr || m->IsRuntimeMethod()) {
          // Keep walking until the first non-runtime (Java) frame.
          return true;
        }
        // The first Java method.
        if (m->IsNative()) {
          // Use JNI method's shorty for the jni stub.
          shorty = m->GetShorty()[0];
        } else if (m->IsProxyMethod()) {
          // Proxy method just invokes its proxied method via
          // art_quick_proxy_invoke_handler.
          shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
        } else {
          const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
          if (instr.IsInvoke()) {
            uint16_t method_index = static_cast<uint16_t>(instr.VRegB());
            const DexFile* dex_file = m->GetDexFile();
            if (interpreter::IsStringInit(dex_file, method_index)) {
              // Invoking string init constructor is turned into invoking
              // StringFactory.newStringFromChars() which returns a string.
              shorty = 'L';
            } else {
              shorty = dex_file->GetMethodShorty(method_index)[0];
            }

          } else {
            // It could be that a non-invoke opcode invokes a stub, which in turn
            // invokes Java code. In such cases, we should never expect a return
            // value from the stub.
          }
        }
        // Stop stack walking since we've seen a Java frame.
        return false;
      },
      thread,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
  return shorty;
}
Mingyao Yang2ee17902017-08-30 11:37:08 -07001464
// Reconstructs the JValue returned by `method` from the raw GPR/FPR register
// results captured at the instrumentation exit stub. Sets *is_ref to whether
// the return type is a reference ('L' or '[' shorty).
JValue Instrumentation::GetReturnValue(
    Thread* self, ArtMethod* method, bool* is_ref, uint64_t* gpr_result, uint64_t* fpr_result) {
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty;

  // Runtime method does not call into MethodExitEvent() so there should not be
  // suspension point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  if (method->IsRuntimeMethod()) {
    Runtime* runtime = Runtime::Current();
    if (method != runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit) &&
        method != runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      // If the caller is at an invocation point and the runtime method is not
      // for clinit, we need to pass return results to the caller.
      // We need the correct shorty to decide whether we need to pass the return
      // result for deoptimization below.
      return_shorty = GetRuntimeMethodShorty(self);
    } else {
      // Some runtime methods such as allocations, unresolved field getters, etc.
      // have return value. We don't need to set return_value since MethodExitEvent()
      // below isn't called for runtime methods. Deoptimization doesn't need the
      // value either since the dex instruction will be re-executed by the
      // interpreter, except these two cases:
      // (1) For an invoke, which is handled above to get the correct shorty.
      // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
      //     idempotent. However there is no return value for it anyway.
      return_shorty = 'V';
    }
  } else {
    // Regular Java method: first character of the shorty is the return type.
    return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  }

  *is_ref = return_shorty == '[' || return_shorty == 'L';
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    // Floating-point results live in the FP register.
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  return return_value;
}
1509
// Decides whether the caller frame found by `visitor` must continue execution in
// the interpreter (i.e. be deoptimized) after returning from the instrumented method.
bool Instrumentation::ShouldDeoptimizeMethod(Thread* self, const NthCallerVisitor& visitor) {
  bool should_deoptimize_frame = false;
  const OatQuickMethodHeader* header = visitor.GetCurrentOatQuickMethodHeader();
  if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
    uint8_t should_deopt_flag = visitor.GetShouldDeoptimizeFlag();
    // DeoptimizeFlag could be set for debugging or for CHA invalidations.
    // Deoptimize here only if it was requested for debugging. CHA
    // invalidations are handled in the JITed code.
    if ((should_deopt_flag & static_cast<uint8_t>(DeoptimizeFlagValue::kDebug)) != 0) {
      should_deoptimize_frame = true;
    }
  }
  // Deoptimize only when there actually is a caller, and any of the triggers holds.
  return (visitor.caller != nullptr) &&
         (InterpreterStubsInstalled() || IsDeoptimized(visitor.caller) ||
          self->IsForceInterpreter() ||
          // NB Since structurally obsolete compiled methods might have the offsets of
          // methods/fields compiled in we need to go back to interpreter whenever we hit
          // them.
          visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
          Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller) ||
          should_deoptimize_frame);
}
1532
// Pops the instrumentation frame keyed by `return_pc_addr`, sends the method-exit
// event, and returns the (pc, sp-style) two-word value the exit stub should resume
// with — either the original return PC, or the deoptimization entrypoint when the
// caller must continue in the interpreter.
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                                            uintptr_t* return_pc_addr,
                                                            uint64_t* gpr_result,
                                                            uint64_t* fpr_result) {
  DCHECK(gpr_result != nullptr);
  DCHECK(fpr_result != nullptr);
  // Do the pop.
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  auto it = stack->find(reinterpret_cast<uintptr_t>(return_pc_addr));
  CHECK(it != stack->end());
  InstrumentationStackFrame instrumentation_frame = it->second;
  stack->erase(it);

  // Set return PC and check the consistency of the stack.
  // We don't cache the return pc value in a local as it may change after
  // sending a method exit event.
  *return_pc_addr = instrumentation_frame.return_pc_;
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;

  // Recover the return value from the raw register results.
  bool is_ref;
  JValue return_value = GetReturnValue(self, method, &is_ref, gpr_result, fpr_result);
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
  if (is_ref) {
    // Take a handle to the return value so we won't lose it if we suspend.
    // FIXME: The `is_ref` is often guessed wrong, so even object aligment
    // assertion would fail for some tests. See b/204766614 .
    // DCHECK_ALIGNED(return_value.GetL(), kObjectAlignment);
    res.Assign(return_value.GetL());
  }
  if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
    // Note that sending the event may change the contents of *return_pc_addr.
    MethodExitEvent(self, instrumentation_frame.method_, OptionalFrame{}, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  // Check if we forced all threads to deoptimize in the time between this frame being created and
  // now.
  bool should_deoptimize_frame = instrumentation_frame.force_deopt_id_ != current_force_deopt_id_;
  bool deoptimize = ShouldDeoptimizeMethod(self, visitor) || should_deoptimize_frame;

  if (is_ref) {
    // Restore the return value if it's a reference since it might have moved.
    *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
  }
  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing "
                << visitor.caller->PrettyMethod()
                << " by returning from "
                << method->PrettyMethod()
                << " with result "
                << std::hex << return_value.GetJ() << std::dec
                << " in "
                << *self;
    }
    // Stash the return value so the interpreter can resume with it, then jump to
    // the deoptimization entrypoint instead of the original return PC.
    DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
    self->PushDeoptimizationContext(return_value,
                                    is_ref,
                                    /* exception= */ nullptr,
                                    /* from_code= */ false,
                                    deopt_method_type);
    return GetTwoWordSuccessValue(*return_pc_addr,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
      VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
                  << " at PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << method->PrettyMethod()
                << " to PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    // Normal path: resume at the original return PC.
    return GetTwoWordSuccessValue(0, *return_pc_addr);
  }
}
1616
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001617uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, uintptr_t pop_until) const {
1618 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
1619 self->GetInstrumentationStack();
1620 // Pop all instrumentation frames below `pop_until`.
1621 uintptr_t return_pc = 0u;
1622 for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until;) {
1623 auto e = i;
1624 ++i;
1625 if (kVerboseInstrumentation) {
1626 LOG(INFO) << "Popping for deoptimization " << e->second.method_->PrettyMethod();
Mingyao Yang2ee17902017-08-30 11:37:08 -07001627 }
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001628 return_pc = e->second.return_pc_;
1629 stack->erase(e);
Ian Rogers62d6c772013-02-27 08:32:07 -08001630 }
Alex Light2c8206f2018-06-08 14:51:09 -07001631 return return_pc;
Ian Rogers62d6c772013-02-27 08:32:07 -08001632}
1633
1634std::string InstrumentationStackFrame::Dump() const {
1635 std::ostringstream os;
David Sehr709b0702016-10-13 09:12:37 -07001636 os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00001637 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_)
1638 << " force_deopt_id=" << force_deopt_id_;
Ian Rogers62d6c772013-02-27 08:32:07 -08001639 return os.str();
1640}
1641
1642} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -08001643} // namespace art