blob: bcde9e5a2eef03fee99a974ac0ff94096a9a5dbc [file] [log] [blame]
jeffhao725a9572012-11-13 18:20:12 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
Ian Rogersef7d42f2014-01-06 12:55:46 -080021#include "atomic.h"
Elliott Hughes76160052012-12-12 16:31:20 -080022#include "base/unix_file/fd_file.h"
jeffhao725a9572012-11-13 18:20:12 -080023#include "class_linker.h"
24#include "debugger.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080025#include "dex_file-inl.h"
Mathieu Chartierd8891782014-03-02 13:28:37 -080026#include "entrypoints/quick/quick_alloc_entrypoints.h"
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010027#include "interpreter/interpreter.h"
Brian Carlstromea46f952013-07-30 01:26:50 -070028#include "mirror/art_method-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080029#include "mirror/class-inl.h"
30#include "mirror/dex_cache.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080031#include "mirror/object_array-inl.h"
Ian Rogers4f6ad8a2013-03-18 15:27:28 -070032#include "mirror/object-inl.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080033#include "nth_caller_visitor.h"
Ian Rogersc928de92013-02-27 14:30:44 -080034#if !defined(ART_USE_PORTABLE_COMPILER)
Ian Rogers166db042013-07-26 12:05:57 -070035#include "entrypoints/quick/quick_entrypoints.h"
jeffhao725a9572012-11-13 18:20:12 -080036#endif
37#include "object_utils.h"
38#include "os.h"
39#include "scoped_thread_state_change.h"
40#include "thread.h"
41#include "thread_list.h"
jeffhao725a9572012-11-13 18:20:12 -080042
43namespace art {
Ian Rogersfa824272013-11-05 16:12:57 -080044
Ian Rogers62d6c772013-02-27 08:32:07 -080045namespace instrumentation {
jeffhao725a9572012-11-13 18:20:12 -080046
// When true, emit verbose LOG(INFO) diagnostics while installing/removing
// instrumentation exit stubs on thread stacks. Compile-time constant; keep
// false in normal builds.
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
Ian Rogers816432e2013-09-06 15:47:45 -070053
// ClassLinker::VisitClasses callback: forwards each visited class to
// Instrumentation::InstallStubsForClass. |arg| is the Instrumentation*
// passed to VisitClasses. Returns true to continue visitation.
static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}
59
// Default-constructs instrumentation with no stubs installed, no listeners
// registered, and the main (non-instrumented) interpreter handler table.
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}
73
Ian Rogers62d6c772013-02-27 08:32:07 -080074bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010075 for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
76 InstallStubsForMethod(klass->GetDirectMethod(i));
jeffhao725a9572012-11-13 18:20:12 -080077 }
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010078 for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
79 InstallStubsForMethod(klass->GetVirtualMethod(i));
jeffhao725a9572012-11-13 18:20:12 -080080 }
81 return true;
82}
83
// Atomically (from the caller's perspective, under the mutator lock) points
// |method| at the given portable and quick entrypoints, keeps the method's
// "portable compiled" flag in sync with |have_portable_code|, and selects a
// matching interpreter-entry bridge.
// @param method            method whose entrypoints are rewritten.
// @param quick_code        new quick-compiled-code entrypoint.
// @param portable_code     new portable-compiled-code entrypoint.
// @param have_portable_code whether |portable_code| is real portable code
//                          (as opposed to a bridge/trampoline).
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    // Code no longer portable — clear the stale flag.
    method->ClearIsPortableCompiled();
  }
  // The resolution method is special and keeps its interpreter entrypoint.
  if (!method->IsResolutionMethod()) {
    // If the method will execute in the interpreter (either directly via the
    // to-interpreter bridge, or via the resolution trampoline while the whole
    // runtime is forced to interpret-only), take the interpreter-to-interpreter
    // fast path; otherwise go through the interpreter-to-compiled-code bridge.
    if (quick_code == GetQuickToInterpreterBridge() ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // Portable and quick entrypoints must agree (bridge with bridge,
        // trampoline with trampoline).
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}
115
// Chooses and installs the correct quick/portable entrypoints for |method|
// given the current instrumentation level: real oat code when uninstalling,
// interpreter bridges when interpreter stubs are required or the method is
// deoptimized, the instrumentation entry stub when only entry/exit events are
// needed, and the resolution trampoline for static methods of uninitialized
// classes. Abstract and proxy methods are left untouched.
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  // "Uninstall" means no instrumentation stubs of any kind are required.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must still run in the interpreter even with no stubs installed.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      // Safe to point directly at the compiled oat code.
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: keep the resolution
      // trampoline so class initialization happens on first call.
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite interpreter to prevent from posting method entry/exit events twice.
        new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_quick_code != GetQuickToInterpreterBridge()) {
          DCHECK(new_portable_code != GetPortableToInterpreterBridge());
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}
163
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack, replacing each quick frame's return PC with the
  // instrumentation exit stub PC and recording the real return PC in an
  // InstrumentationStackFrame. Shadow frames, upcalls and runtime methods are
  // skipped. Frames already carrying the exit stub (from a prior install) are
  // matched against the existing instrumentation stack instead of re-pushed.
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        // Recover the real return PC from the previously saved frame so the
        // dex pc bookkeeping below stays correct.
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame before old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Record the dex pc of the call site in the caller (derived from the
      // previous frame's return PC) for the MethodEnterEvents issued below.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    const size_t existing_instrumentation_frames_count_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}
274
// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack and, for each quick frame that has a matching
  // entry on the instrumentation stack (by frame id), restores the saved
  // return PC. Stops early once the instrumentation stack is exhausted.
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == GetFrameId()) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            // Frames entered through the interpreter entry stub belong to the
            // callee-save runtime method, not the instrumented method itself.
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (!instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every saved instrumentation frame must have been matched and restored.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}
358
// Registers |listener| for each event kind set in the |events| bitmask and
// raises the corresponding have_*_listeners_ fast-path flag. Requires the
// mutator lock held exclusively (all threads suspended), so the listener
// lists can be mutated without extra synchronization. Note: a listener added
// for the same event twice will be notified twice.
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    field_read_listeners_.push_back(listener);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    field_write_listeners_.push_back(listener);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  // Switch the interpreter to the instrumented handler table if needed.
  UpdateInterpreterHandlerTable();
}
391
Ian Rogers62d6c772013-02-27 08:32:07 -0800392void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
393 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
Ian Rogers62d6c772013-02-27 08:32:07 -0800394
395 if ((events & kMethodEntered) != 0) {
396 bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
397 listener) != method_entry_listeners_.end();
398 if (contains) {
399 method_entry_listeners_.remove(listener);
400 }
401 have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800402 }
403 if ((events & kMethodExited) != 0) {
404 bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
405 listener) != method_exit_listeners_.end();
406 if (contains) {
407 method_exit_listeners_.remove(listener);
408 }
409 have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800410 }
411 if ((events & kMethodUnwind) != 0) {
412 method_unwind_listeners_.remove(listener);
413 }
414 if ((events & kDexPcMoved) != 0) {
415 bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
416 listener) != dex_pc_listeners_.end();
417 if (contains) {
418 dex_pc_listeners_.remove(listener);
419 }
420 have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800421 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200422 if ((events & kFieldRead) != 0) {
423 bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
424 listener) != field_read_listeners_.end();
425 if (contains) {
426 field_read_listeners_.remove(listener);
427 }
428 have_field_read_listeners_ = field_read_listeners_.size() > 0;
429 }
430 if ((events & kFieldWritten) != 0) {
431 bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
432 listener) != field_write_listeners_.end();
433 if (contains) {
434 field_write_listeners_.remove(listener);
435 }
436 have_field_write_listeners_ = field_write_listeners_.size() > 0;
437 }
Jeff Hao14dd5a82013-04-11 10:23:36 -0700438 if ((events & kExceptionCaught) != 0) {
439 exception_caught_listeners_.remove(listener);
440 have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
441 }
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200442 UpdateInterpreterHandlerTable();
jeffhao725a9572012-11-13 18:20:12 -0800443}
444
// Moves the runtime between instrumentation levels:
//   0 — no stubs, 1 — entry/exit stubs, 2 — full interpreter stubs.
// Installing (level > 0) re-stubs all classes and pushes instrumentation
// frames onto every thread's stack; uninstalling restores real entrypoints
// and, only if no method remains deoptimized, removes the exit stubs from
// thread stacks. No-op when the requested level equals the current one.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // thread_list_lock_ must not be held: it is acquired below.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = deoptimized_methods_.empty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
498
// ThreadList::ForEach callback: re-initializes the per-thread quick
// allocation entrypoints. |arg| is unused.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}
502
// Switches the quick allocation entrypoints between the instrumented and
// uninstrumented variants for all threads. Suspends all threads while
// swapping (only once the runtime has started) so no thread allocates
// through a half-updated entrypoint table.
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    // runtime_shutdown_lock_ guards the global entrypoint switch.
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}
518
// Increments the refcount of allocation-entrypoint instrumentation requests;
// the first requester actually switches the entrypoints to the instrumented
// variants. Paired with UninstrumentQuickAllocEntryPoints.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  // FetchAndAdd returns the old value: 0 -> we are the first requester.
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}
529
// Decrements the refcount of allocation-entrypoint instrumentation requests;
// the last requester switches the entrypoints back to the uninstrumented
// variants. Paired with InstrumentQuickAllocEntryPoints.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  // FetchAndSub returns the old value: 1 -> we were the last requester.
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}
540
541void Instrumentation::ResetQuickAllocEntryPoints() {
542 Runtime* runtime = Runtime::Current();
543 if (runtime->IsStarted()) {
Mathieu Chartiere6da9af2013-12-16 11:54:42 -0800544 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
545 runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
Ian Rogersfa824272013-11-05 16:12:57 -0800546 }
547}
548
// Updates |method|'s entrypoints to newly available code, but overrides the
// requested code with instrumentation stubs when instrumentation is active:
// interpreter bridges for interpreter-mode/deoptimized methods, the
// instrumentation entry stub when entry/exit stubs are installed. Bridges and
// resolution trampolines passed in are kept as-is.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation, install the requested code directly.
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must run in the interpreter.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
        quick_code == GetQuickToInterpreterBridge()) {
      // Caller is installing a trampoline/bridge: keep it, it is already safe.
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      // Route calls through the instrumentation entry stub for events.
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
582
// Forces 'method' to be executed by the interpreter from now on, recording it in the
// deoptimized-methods set and, if needed, installing instrumentation frames on all
// thread stacks so in-flight activations also deoptimize on return.
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  // Only regular, concrete, managed methods can be deoptimized.
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair;
  {
    // Hold the lock only for the set mutation; entry point updates below don't need it.
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    pair = deoptimized_methods_.insert(method);
  }
  // insert() reports whether the method was already present; double-deoptimizing is a bug.
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    // Not in full-interpreter mode: redirect just this method to the interpreter bridges.
    UpdateEntrypoints(method, GetQuickToInterpreterBridge(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
608
// Reverses Deoptimize() for 'method': removes it from the deoptimized set and restores
// its real code (or the resolution trampoline for not-yet-initialized static methods).
// When the last deoptimized method is removed, thread stacks are restored as well.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    // Mutate the set under the write lock and remember whether it became empty;
    // the emptiness decision must be taken atomically with the erase.
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    auto it = deoptimized_methods_.find(method);
    CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    deoptimized_methods_.erase(it);
    empty = deoptimized_methods_.empty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Class not yet initialized: the resolution trampoline must run first so that
      // static initialization happens before the method's code executes.
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
648
649bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700650 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100651 DCHECK(method != nullptr);
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700652 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100653}
654
655void Instrumentation::EnableDeoptimization() {
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700656 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100657 CHECK(deoptimized_methods_.empty());
Sebastien Hertz11d40c22014-02-19 18:00:17 +0100658 CHECK_EQ(deoptimization_enabled_, false);
659 deoptimization_enabled_ = true;
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100660}
661
// Tears down all deoptimization state: undoes a full deoptimization if active, then
// undeoptimizes every individually deoptimized method, and finally clears the flag.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  // The lock is released before each Undeoptimize() call because Undeoptimize()
  // itself takes deoptimized_methods_lock_ for writing; holding it here would deadlock.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
682
Sebastien Hertz11d40c22014-02-19 18:00:17 +0100683// Indicates if instrumentation should notify method enter/exit events to the listeners.
684bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
685 return deoptimization_enabled_ || interpreter_stubs_installed_;
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100686}
687
688void Instrumentation::DeoptimizeEverything() {
689 CHECK(!interpreter_stubs_installed_);
690 ConfigureStubs(false, true);
691}
692
693void Instrumentation::UndeoptimizeEverything() {
694 CHECK(interpreter_stubs_installed_);
695 ConfigureStubs(false, false);
696}
697
698void Instrumentation::EnableMethodTracing() {
699 bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
700 ConfigureStubs(!require_interpreter, require_interpreter);
701}
702
703void Instrumentation::DisableMethodTracing() {
704 ConfigureStubs(false, false);
jeffhao725a9572012-11-13 18:20:12 -0800705}
706
Ian Rogersef7d42f2014-01-06 12:55:46 -0800707const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
Ian Rogers62d6c772013-02-27 08:32:07 -0800708 Runtime* runtime = Runtime::Current();
709 if (LIKELY(!instrumentation_stubs_installed_)) {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800710 const void* code = method->GetEntryPointFromQuickCompiledCode();
Ian Rogers62d6c772013-02-27 08:32:07 -0800711 DCHECK(code != NULL);
Ian Rogers848871b2013-08-05 10:56:33 -0700712 if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
713 code != GetQuickToInterpreterBridge())) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800714 return code;
715 }
716 }
Ian Rogersef7d42f2014-01-06 12:55:46 -0800717 return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
jeffhao725a9572012-11-13 18:20:12 -0800718}
719
// Delivers a method-entered event to every registered method-entry listener.
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // The iterator is advanced *before* invoking the callback, so a listener that removes
  // itself during MethodEntered() does not invalidate the iterator we are holding.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}
733
// Delivers a method-exited event (with the method's return value) to every
// registered method-exit listener.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // Advance before calling out so a listener removing itself does not break the iteration.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
747
748void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800749 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800750 uint32_t dex_pc) const {
751 if (have_method_unwind_listeners_) {
Mathieu Chartier02e25112013-08-14 16:14:24 -0700752 for (InstrumentationListener* listener : method_unwind_listeners_) {
Sebastien Hertz51db44a2013-11-19 10:00:29 +0100753 listener->MethodUnwind(thread, this_object, method, dex_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -0800754 }
755 }
756}
757
758void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800759 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800760 uint32_t dex_pc) const {
761 // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
762 // action where it can remove itself as a listener and break the iterator. The copy only works
763 // around the problem and in general we may have to move to something like reference counting to
764 // ensure listeners are deleted correctly.
765 std::list<InstrumentationListener*> copy(dex_pc_listeners_);
Mathieu Chartier02e25112013-08-14 16:14:24 -0700766 for (InstrumentationListener* listener : copy) {
767 listener->DexPcMoved(thread, this_object, method, dex_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -0800768 }
769}
770
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200771void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
772 mirror::ArtMethod* method, uint32_t dex_pc,
773 mirror::ArtField* field) const {
774 if (have_field_read_listeners_) {
775 // TODO: same comment than DexPcMovedEventImpl.
776 std::list<InstrumentationListener*> copy(field_read_listeners_);
777 for (InstrumentationListener* listener : copy) {
778 listener->FieldRead(thread, this_object, method, dex_pc, field);
779 }
780 }
781}
782
783void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
784 mirror::ArtMethod* method, uint32_t dex_pc,
785 mirror::ArtField* field, const JValue& field_value) const {
786 if (have_field_write_listeners_) {
787 // TODO: same comment than DexPcMovedEventImpl.
788 std::list<InstrumentationListener*> copy(field_write_listeners_);
789 for (InstrumentationListener* listener : copy) {
790 listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
791 }
792 }
793}
794
// Notifies exception-caught listeners that 'exception_object' is about to be handled
// at catch_method/catch_dex_pc.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    // Temporarily clear the thread's pending exception so listener code (which may
    // itself run managed code) executes with a clean exception state; restore it after.
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
  }
}
808
809static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
810 int delta)
811 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
812 size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
813 if (frame_id != instrumentation_frame.frame_id_) {
814 LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
815 << instrumentation_frame.frame_id_;
816 StackVisitor::DescribeStack(self);
817 CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
818 }
819}
820
821void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
Brian Carlstromea46f952013-07-30 01:26:50 -0700822 mirror::ArtMethod* method,
Jeff Hao9a916d32013-06-27 18:45:37 -0700823 uintptr_t lr, bool interpreter_entry) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800824 // We have a callee-save frame meaning this value is guaranteed to never be 0.
825 size_t frame_id = StackVisitor::ComputeNumFrames(self);
826 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
827 if (kVerboseInstrumentation) {
Brian Carlstrom2d888622013-07-18 17:02:00 -0700828 LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
Ian Rogers62d6c772013-02-27 08:32:07 -0800829 }
830 instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
Jeff Hao9a916d32013-06-27 18:45:37 -0700831 frame_id, interpreter_entry);
Ian Rogers62d6c772013-02-27 08:32:07 -0800832 stack->push_front(instrumentation_frame);
833
834 MethodEnterEvent(self, this_object, method, 0);
835}
836
// Pops the top instrumentation frame on method exit, fires the method-exited event,
// and decides whether the caller must be deoptimized. Returns either the plain return
// PC, or (deopt entry point | return_pc << 32) packed into a uint64_t for the exit stub.
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  // Reconstruct the return value from the raw register contents, based on the
  // method's return type shorty: FP registers for 'F'/'D', GPR otherwise.
  mirror::ArtMethod* method = instrumentation_frame.method_;
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    // Stash the return value for the interpreter to pick up after deoptimization,
    // then pack the deopt entry point (low 32 bits) with the return PC (high 32 bits).
    self->SetDeoptimizationReturnValue(return_value);
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
           (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return *return_pc;
  }
}
890
Ian Rogers62d6c772013-02-27 08:32:07 -0800891void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
892 // Do the pop.
893 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
894 CHECK_GT(stack->size(), 0U);
895 InstrumentationStackFrame instrumentation_frame = stack->front();
896 // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
897 stack->pop_front();
898
Brian Carlstromea46f952013-07-30 01:26:50 -0700899 mirror::ArtMethod* method = instrumentation_frame.method_;
Ian Rogers62d6c772013-02-27 08:32:07 -0800900 if (is_deoptimization) {
901 if (kVerboseInstrumentation) {
902 LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
903 }
904 } else {
905 if (kVerboseInstrumentation) {
906 LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
907 }
908
909 // Notify listeners of method unwind.
910 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
911 // return_pc.
912 uint32_t dex_pc = DexFile::kDexNoIndex;
913 MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
914 }
915}
916
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700917void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
918 WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
919 if (deoptimized_methods_.empty()) {
920 return;
921 }
922 std::set<mirror::ArtMethod*> new_deoptimized_methods;
923 for (mirror::ArtMethod* method : deoptimized_methods_) {
924 DCHECK(method != nullptr);
925 callback(reinterpret_cast<mirror::Object**>(&method), arg, 0, kRootVMInternal);
926 new_deoptimized_methods.insert(method);
927 }
928 deoptimized_methods_ = new_deoptimized_methods;
929}
930
Ian Rogers62d6c772013-02-27 08:32:07 -0800931std::string InstrumentationStackFrame::Dump() const {
932 std::ostringstream os;
933 os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
934 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
935 return os.str();
936}
937
938} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -0800939} // namespace art