/* Copyright (C) 2016 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "events-inl.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "gc/allocation_listener.h"
#include "gc/gc_pause_listener.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "handle_scope-inl.h"
#include "instrumentation.h"
#include "jni_env_ext-inl.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "nativehelper/ScopedLocalRef.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"
#include "ti_phase.h"

namespace openjdkjvmti {

bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
}

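// Per-thread masks are keyed by a UniqueThread, i.e. a (Thread*, tid) pair, so that a Thread
// object whose storage is later reused for a different thread is not mistaken for the thread
// that originally registered a mask (see the TODO below about pruning stale entries).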
EventMask& EventMasks::GetEventMask(art::Thread* thread) {
  if (thread == nullptr) {
    return global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return pair.second;
    }
  }

  // TODO: Remove any old UniqueThread with the same pointer, if one exists.

  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
  return thread_event_masks.back().second;
}

EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
  if (thread == nullptr) {
    return &global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return &pair.second;
    }
  }

  return nullptr;
}


void EventMasks::EnableEvent(art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  if (thread != nullptr) {
    unioned_thread_event_mask.Set(event, true);
  }
}

void EventMasks::DisableEvent(art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event, false);
  if (thread != nullptr) {
    // Regenerate union for the event.
    bool union_value = false;
    for (auto& pair : thread_event_masks) {
      union_value |= pair.second.Test(event);
      if (union_value) {
        break;
      }
    }
    unioned_thread_event_mask.Set(event, union_value);
  }
}

void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
  if (UNLIKELY(caps.can_retransform_classes == 1)) {
    // If this env is gaining (or losing) the can_retransform_classes capability, we need to switch
    // all ClassFileLoadHook events between the NonRetransformable and Retransformable variants, in
    // whichever direction applies.
    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
    if (global_event_mask.Test(to_remove)) {
      CHECK(!global_event_mask.Test(to_add));
      global_event_mask.Set(to_remove, false);
      global_event_mask.Set(to_add, true);
    }

    if (unioned_thread_event_mask.Test(to_remove)) {
      CHECK(!unioned_thread_event_mask.Test(to_add));
      unioned_thread_event_mask.Set(to_remove, false);
      unioned_thread_event_mask.Set(to_add, true);
    }
    for (auto& thread_mask : thread_event_masks) {
      if (thread_mask.second.Test(to_remove)) {
        CHECK(!thread_mask.second.Test(to_add));
        thread_mask.second.Set(to_remove, false);
        thread_mask.second.Set(to_add, true);
      }
    }
  }
}
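// For example, an env that has kClassFileLoadHookNonRetransformable enabled and is then granted
// can_retransform_classes has that enabled bit moved over to kClassFileLoadHookRetransformable,
// so the retransform-capable variant of the hook fires from then on; losing the capability moves
// the bit back the other way.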

void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  // Since we never shrink this array we might as well try to fill gaps.
  auto it = std::find(envs.begin(), envs.end(), nullptr);
  if (it != envs.end()) {
    *it = env;
  } else {
    envs.push_back(env);
  }
}

void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  // Since we might be currently iterating over the envs list we cannot actually erase elements.
  // Instead we will simply replace them with 'nullptr' and skip them manually.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    *it = nullptr;
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMask(static_cast<ArtJvmtiEvent>(i));
    }
  }
}

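// Per the JVMTI specification the following events are not thread filtered, so enabling or
// disabling them for a specific thread is rejected (see the ILLEGAL_ARGUMENT check in SetEvent
// below).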
static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}

class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();

      jthread thread_peer;
      if (self->IsStillStarting()) {
        thread_peer = nullptr;
      } else {
        thread_peer = jni_env->AddLocalReference<jthread>(self->GetPeer());
      }

      ScopedLocalRef<jthread> thread(jni_env, thread_peer);
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      handler_->DispatchEvent<ArtJvmtiEvent::kVmObjectAlloc>(self,
                                                             reinterpret_cast<JNIEnv*>(jni_env),
                                                             thread.get(),
                                                             object.get(),
                                                             klass.get(),
                                                             static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};

static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
  if (enable) {
    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
  } else {
    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
  }
}

// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_FINISH.
class JvmtiGcPauseListener : public art::gc::GcPauseListener {
 public:
  explicit JvmtiGcPauseListener(EventHandler* handler)
      : handler_(handler),
        start_enabled_(false),
        finish_enabled_(false) {}

  void StartPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(nullptr);
  }

  void EndPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(nullptr);
  }

  bool IsEnabled() {
    return start_enabled_ || finish_enabled_;
  }

  void SetStartEnabled(bool e) {
    start_enabled_ = e;
  }

  void SetFinishEnabled(bool e) {
    finish_enabled_ = e;
  }

 private:
  EventHandler* handler_;
  bool start_enabled_;
  bool finish_enabled_;
};

static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
  bool old_state = listener->IsEnabled();

  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
    listener->SetStartEnabled(enable);
  } else {
    listener->SetFinishEnabled(enable);
  }

  bool new_state = listener->IsEnabled();

  if (old_state != new_state) {
    if (new_state) {
      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
    } else {
      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
    }
  }
}

template<typename Type>
static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
}

class JvmtiMethodTraceListener FINAL : public art::instrumentation::InstrumentationListener {
 public:
  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}

  template<ArtJvmtiEvent kEvent, typename ...Args>
  void RunEventCallback(art::Thread* self, art::JNIEnvExt* jnienv, Args... args)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
    // Just give the event a good-sized JNI frame. 100 should be fine.
    jnienv->PushFrame(100);
    {
      // Need to do trampoline! :(
      art::ScopedThreadSuspension sts(self, art::ThreadState::kNative);
      event_handler_->DispatchEvent<kEvent>(self,
                                            static_cast<JNIEnv*>(jnienv),
                                            thread_jni.get(),
                                            args...);
    }
    jnienv->PopFrame();
  }

  // Call-back for when a method is entered.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }

  // Call-back for when a method is exited with a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_EQ(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is exited without a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_NE(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // A 64-bit integer is the largest member of the jvalue union, so copying the raw 64 bits
      // covers every primitive return type.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }


  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away the original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }

  // Call-back for when the dex pc moves in a method.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent with respect
    // to default methods. This could interact with obsolete methods if we ever let interface
    // redefinition happen, though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // The single-step event is reported first, according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The dispatch code will filter the individual breakpoints
    // down to the environments that have one set at this location.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(self, jnienv, jmethod, location);
    }
  }

  // Call-back for when we read from a field.
  void FieldRead(art::Thread* self,
                 art::Handle<art::mirror::Object> this_object,
                 art::ArtMethod* method,
                 uint32_t dex_pc,
                 art::ArtField* field)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method),
                                                    static_cast<jlocation>(dex_pc),
                                                    static_cast<jclass>(fklass.get()),
                                                    this_ref.get(),
                                                    art::jni::EncodeArtField(field));
    }
  }

  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char
          val);
    }
  }

  // Call-back for when we write a primitive value into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // A 64-bit integer is the largest member of the jvalue union, so copying the raw 64 bits
      // covers every primitive field type.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          // NB the instrumentation reports the declaring class as this_object for static field
          // modifications, so pass nullptr for the object in that case.
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }

  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFramePop)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      jboolean is_exception_pending = self->IsExceptionPending();
      RunEventCallback<ArtJvmtiEvent::kFramePop>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(frame.GetMethod()),
          is_exception_pending,
          &frame);
    }
  }

  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest Java frame with a
    // compatible catch statement.
    class CatchLocationFinder FINAL : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
            exception_class_(exception_class),
            catch_method_ptr_(out_catch_method),
            catch_dex_pc_ptr_(out_catch_pc) {}

      bool VisitFrame() OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::DexFile::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::DexFile::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions */ false);
  }

  // Call-back when an exception is thrown.
  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    // The instrumentation code has already cleared the pending exception for us.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtMethod* catch_method;
      uint32_t catch_pc;
      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
      uint32_t dex_pc = 0;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended */ true,
                                                      /* abort_on_error */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kException>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get(),
          art::jni::EncodeArtMethod(catch_method),
          static_cast<jlocation>(catch_pc));
    }
    return;
  }

  // Call-back when an exception is handled.
  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    // Since the exception has already been handled there shouldn't be one pending.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      uint32_t dex_pc;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended */ true,
                                                      /* abort_on_error */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get());
    }
    return;
  }

  // Call-back for when we execute a branch.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

  // Call-back for when we get an invokevirtual or an invokeinterface.
  void InvokeVirtualOrInterface(art::Thread* self ATTRIBUTE_UNUSED,
                                art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                                art::ArtMethod* caller ATTRIBUTE_UNUSED,
                                uint32_t dex_pc ATTRIBUTE_UNUSED,
                                art::ArtMethod* callee ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

 private:
  EventHandler* const event_handler_;
};

static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kMethodEntry:
      return art::instrumentation::Instrumentation::kMethodEntered;
    case ArtJvmtiEvent::kMethodExit:
      return art::instrumentation::Instrumentation::kMethodExited |
             art::instrumentation::Instrumentation::kMethodUnwind;
    case ArtJvmtiEvent::kFieldModification:
      return art::instrumentation::Instrumentation::kFieldWritten;
    case ArtJvmtiEvent::kFieldAccess:
      return art::instrumentation::Instrumentation::kFieldRead;
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      return art::instrumentation::Instrumentation::kDexPcMoved;
    case ArtJvmtiEvent::kFramePop:
      return art::instrumentation::Instrumentation::kWatchedFramePop;
    case ArtJvmtiEvent::kException:
      return art::instrumentation::Instrumentation::kExceptionThrown;
    case ArtJvmtiEvent::kExceptionCatch:
      return art::instrumentation::Instrumentation::kExceptionHandled;
    default:
      LOG(FATAL) << "Unknown event " << static_cast<uint32_t>(event);
      return 0;
  }
}

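// Note that kBreakpoint and kSingleStep above intentionally map to the same instrumentation event
// (kDexPcMoved); HandleEventType below therefore only installs or removes the shared listener
// when the other of the two events is not still enabled somewhere.
//
// Installing or removing an instrumentation listener is done inside a GC critical section and
// with every other thread suspended, so the switch to (or away from) method tracing appears
// atomic to running code.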
static void SetupTraceListener(JvmtiMethodTraceListener* listener,
                               ArtJvmtiEvent event,
                               bool enable) {
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  uint32_t new_events = GetInstrumentationEventsFor(event);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::gc::ScopedGCCriticalSection gcs(art::Thread::Current(),
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    // TODO Depending on the features being used we should be able to avoid deoptimizing everything
    // like we do here.
    if (!instr->AreAllMethodsDeoptimized()) {
      instr->EnableMethodTracing("jvmti-tracing", /*needs_interpreter*/true);
    }
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}

// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
// the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES(art::Locks::mutator_lock_) {
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      return true;
    }

   private:
    art::Runtime* runtime_;
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}

// Handle special work for the given event type, if necessary.
void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
  switch (event) {
    case ArtJvmtiEvent::kVmObjectAlloc:
      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
      return;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
      return;

    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep: {
      ArtJvmtiEvent other = (event == ArtJvmtiEvent::kBreakpoint) ? ArtJvmtiEvent::kSingleStep
                                                                  : ArtJvmtiEvent::kBreakpoint;
      // We only need to do anything if there isn't already a listener installed (and held on to)
      // by the other jvmti event that uses DexPcMoved.
      if (!IsEventEnabledAnywhere(other)) {
        SetupTraceListener(method_trace_listener_.get(), event, enable);
      }
      return;
    }
    // FramePop can never be disabled once it has been turned on, since otherwise we would either
    // need to deal with dangling pointers or miss events.
    case ArtJvmtiEvent::kFramePop:
      if (!enable || frame_pop_enabled) {
        break;
      } else {
        SetupTraceListener(method_trace_listener_.get(), event, enable);
        // Remember that the listener is installed so we never attempt to install it twice.
        frame_pop_enabled = true;
        break;
      }
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      SetupTraceListener(method_trace_listener_.get(), event, enable);
      return;

    default:
      break;
  }
}

// Checks to see if the env has the capabilities associated with the given event.
static bool HasAssociatedCapability(ArtJvmTiEnv* env,
                                    ArtJvmtiEvent event) {
  jvmtiCapabilities caps = env->capabilities;
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
      return caps.can_generate_breakpoint_events == 1;

    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
      return caps.can_generate_compiled_method_load_events == 1;

    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      return caps.can_generate_exception_events == 1;

    case ArtJvmtiEvent::kFieldAccess:
      return caps.can_generate_field_access_events == 1;

    case ArtJvmtiEvent::kFieldModification:
      return caps.can_generate_field_modification_events == 1;

    case ArtJvmtiEvent::kFramePop:
      return caps.can_generate_frame_pop_events == 1;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      return caps.can_generate_garbage_collection_events == 1;

    case ArtJvmtiEvent::kMethodEntry:
      return caps.can_generate_method_entry_events == 1;

    case ArtJvmtiEvent::kMethodExit:
      return caps.can_generate_method_exit_events == 1;

    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      return caps.can_generate_monitor_events == 1;

    case ArtJvmtiEvent::kNativeMethodBind:
      return caps.can_generate_native_method_bind_events == 1;

    case ArtJvmtiEvent::kObjectFree:
      return caps.can_generate_object_free_events == 1;

    case ArtJvmtiEvent::kSingleStep:
      return caps.can_generate_single_step_events == 1;

    case ArtJvmtiEvent::kVmObjectAlloc:
      return caps.can_generate_vm_object_alloc_events == 1;

    default:
      return true;
  }
}

jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  art::Thread* thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (thread != nullptr) {
    art::ThreadState state = thread->GetState();
    if (state == art::ThreadState::kStarting ||
        state == art::ThreadState::kTerminated ||
        thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
  }

  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  bool old_state = global_mask.Test(event);

  if (mode == JVMTI_ENABLE) {
    env->event_masks.EnableEvent(thread, event);
    global_mask.Set(event);
  } else {
    DCHECK_EQ(mode, JVMTI_DISABLE);

    env->event_masks.DisableEvent(thread, event);
    RecalculateGlobalEventMask(event);
  }

  bool new_state = global_mask.Test(event);

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}
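// Illustrative only (not part of this file): agents normally reach SetEvent through the standard
// jvmtiEnv entry point SetEventNotificationMode, e.g.
//
//   // Enable VMObjectAlloc globally (thread == nullptr) for this environment.
//   jvmti_env->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_VM_OBJECT_ALLOC, nullptr);
//
// The first environment to enable an event flips the global mask and causes HandleEventType to
// install the matching runtime listener; disabling it everywhere removes the listener again.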

void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}

EventHandler::EventHandler() {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
}

EventHandler::~EventHandler() {
}

}  // namespace openjdkjvmti