blob: 2c5ebfe3e188e91623391eee26a8153288818fe9 [file] [log] [blame]
Andreas Gampe77708d92016-10-07 11:48:21 -07001/* Copyright (C) 2016 The Android Open Source Project
2 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
3 *
4 * This file implements interfaces from the file jvmti.h. This implementation
5 * is licensed under the same terms as the file jvmti.h. The
6 * copyright and license information for the file jvmti.h follows.
7 *
8 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
9 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
10 *
11 * This code is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License version 2 only, as
13 * published by the Free Software Foundation. Oracle designates this
14 * particular file as subject to the "Classpath" exception as provided
15 * by Oracle in the LICENSE file that accompanied this code.
16 *
17 * This code is distributed in the hope that it will be useful, but WITHOUT
18 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
19 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 * version 2 for more details (a copy is included in the LICENSE file that
21 * accompanied this code).
22 *
23 * You should have received a copy of the GNU General Public License version
24 * 2 along with this work; if not, write to the Free Software Foundation,
25 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
26 *
27 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
28 * or visit www.oracle.com if you need additional information or have any
29 * questions.
30 */
31
Andreas Gampe27fa96c2016-10-07 15:05:24 -070032#include "events-inl.h"
Andreas Gampe77708d92016-10-07 11:48:21 -070033
Alex Light77fee872017-09-05 14:51:49 -070034#include <array>
Charles Munger5cc0e752018-11-09 12:30:46 -080035#include <sys/time.h>
Alex Light77fee872017-09-05 14:51:49 -070036
Andreas Gampee5d23982019-01-08 10:34:26 -080037#include "arch/context.h"
Steven Morelande431e272017-07-18 16:53:49 -070038#include "art_field-inl.h"
Andreas Gampe77708d92016-10-07 11:48:21 -070039#include "art_jvmti.h"
Alex Lightb7edcda2017-04-27 13:20:31 -070040#include "art_method-inl.h"
Alex Light0fa17862017-10-24 13:43:05 -070041#include "deopt_manager.h"
David Sehr9e734c72018-01-04 17:56:19 -080042#include "dex/dex_file_types.h"
Andreas Gampe27fa96c2016-10-07 15:05:24 -070043#include "gc/allocation_listener.h"
Andreas Gampe9b8c5882016-10-21 15:27:46 -070044#include "gc/gc_pause_listener.h"
45#include "gc/heap.h"
Alex Lightb7edcda2017-04-27 13:20:31 -070046#include "gc/scoped_gc_critical_section.h"
Andreas Gampec15a2f42017-04-21 12:09:39 -070047#include "handle_scope-inl.h"
Andreas Gampe27fa96c2016-10-07 15:05:24 -070048#include "instrumentation.h"
Vladimir Markoa3ad0cd2018-05-04 10:06:38 +010049#include "jni/jni_env_ext-inl.h"
50#include "jni/jni_internal.h"
Andreas Gampe27fa96c2016-10-07 15:05:24 -070051#include "mirror/class.h"
Andreas Gampec15a2f42017-04-21 12:09:39 -070052#include "mirror/object-inl.h"
Vladimir Markof52d92f2019-03-29 12:33:02 +000053#include "monitor-inl.h"
Andreas Gampe373a9b52017-10-18 09:01:57 -070054#include "nativehelper/scoped_local_ref.h"
Andreas Gampe27fa96c2016-10-07 15:05:24 -070055#include "runtime.h"
Andreas Gampec02685c2016-10-17 17:40:27 -070056#include "scoped_thread_state_change-inl.h"
Alex Light9fb1ab12017-09-05 09:32:49 -070057#include "stack.h"
Alex Lightb7edcda2017-04-27 13:20:31 -070058#include "thread-inl.h"
59#include "thread_list.h"
60#include "ti_phase.h"
Charles Munger5cc0e752018-11-09 12:30:46 -080061#include "well_known_classes.h"
Andreas Gampe77708d92016-10-07 11:48:21 -070062
63namespace openjdkjvmti {
64
Alex Light8c2b9292017-11-09 13:21:01 -080065void ArtJvmtiEventCallbacks::CopyExtensionsFrom(const ArtJvmtiEventCallbacks* cb) {
66 if (art::kIsDebugBuild) {
67 ArtJvmtiEventCallbacks clean;
68 DCHECK_EQ(memcmp(&clean, this, sizeof(clean)), 0)
69 << "CopyExtensionsFrom called with initialized eventsCallbacks!";
70 }
71 if (cb != nullptr) {
72 memcpy(this, cb, sizeof(*this));
73 } else {
74 memset(this, 0, sizeof(*this));
75 }
76}
77
78jvmtiError ArtJvmtiEventCallbacks::Set(jint index, jvmtiExtensionEvent cb) {
79 switch (index) {
80 case static_cast<jint>(ArtJvmtiEvent::kDdmPublishChunk):
81 DdmPublishChunk = reinterpret_cast<ArtJvmtiEventDdmPublishChunk>(cb);
82 return OK;
83 default:
84 return ERR(ILLEGAL_ARGUMENT);
85 }
86}
87
88
89bool IsExtensionEvent(jint e) {
90 return e >= static_cast<jint>(ArtJvmtiEvent::kMinEventTypeVal) &&
91 e <= static_cast<jint>(ArtJvmtiEvent::kMaxEventTypeVal) &&
92 IsExtensionEvent(static_cast<ArtJvmtiEvent>(e));
93}
94
95bool IsExtensionEvent(ArtJvmtiEvent e) {
96 switch (e) {
97 case ArtJvmtiEvent::kDdmPublishChunk:
98 return true;
99 default:
100 return false;
101 }
102}
103
Alex Light73afd322017-01-18 11:17:47 -0800104bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
105 return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
106}
107
Andreas Gampe77708d92016-10-07 11:48:21 -0700108EventMask& EventMasks::GetEventMask(art::Thread* thread) {
109 if (thread == nullptr) {
110 return global_event_mask;
111 }
112
113 for (auto& pair : thread_event_masks) {
114 const UniqueThread& unique_thread = pair.first;
115 if (unique_thread.first == thread &&
116 unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
117 return pair.second;
118 }
119 }
120
121 // TODO: Remove old UniqueThread with the same pointer, if exists.
122
123 thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
124 return thread_event_masks.back().second;
125}
126
127EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
128 if (thread == nullptr) {
129 return &global_event_mask;
130 }
131
132 for (auto& pair : thread_event_masks) {
133 const UniqueThread& unique_thread = pair.first;
134 if (unique_thread.first == thread &&
135 unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
136 return &pair.second;
137 }
138 }
139
140 return nullptr;
141}
142
143
// Marks |event| as enabled for |thread| (or globally when |thread| is null)
// in this env's masks. Caller must hold env->event_info_mutex_ exclusively.
void EventMasks::EnableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  if (thread != nullptr) {
    // Keep the cached union of all per-thread masks in sync.
    unioned_thread_event_mask.Set(event, true);
  }
}
153
Alex Light74c84402017-11-29 15:26:38 -0800154void EventMasks::DisableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
155 DCHECK_EQ(&env->event_masks, this);
156 env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
Andreas Gampe77708d92016-10-07 11:48:21 -0700157 DCHECK(EventMask::EventIsInRange(event));
158 GetEventMask(thread).Set(event, false);
159 if (thread != nullptr) {
160 // Regenerate union for the event.
161 bool union_value = false;
162 for (auto& pair : thread_event_masks) {
163 union_value |= pair.second.Test(event);
164 if (union_value) {
165 break;
166 }
167 }
168 unioned_thread_event_mask.Set(event, union_value);
169 }
170}
171
Alex Light73afd322017-01-18 11:17:47 -0800172void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
173 if (UNLIKELY(caps.can_retransform_classes == 1)) {
174 // If we are giving this env the retransform classes cap we need to switch all events of
175 // NonTransformable to Transformable and vice versa.
176 ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
177 : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
178 ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
179 : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
180 if (global_event_mask.Test(to_remove)) {
181 CHECK(!global_event_mask.Test(to_add));
182 global_event_mask.Set(to_remove, false);
183 global_event_mask.Set(to_add, true);
184 }
185
186 if (unioned_thread_event_mask.Test(to_remove)) {
187 CHECK(!unioned_thread_event_mask.Test(to_add));
188 unioned_thread_event_mask.Set(to_remove, false);
189 unioned_thread_event_mask.Set(to_add, true);
190 }
191 for (auto thread_mask : thread_event_masks) {
192 if (thread_mask.second.Test(to_remove)) {
193 CHECK(!thread_mask.second.Test(to_add));
194 thread_mask.second.Set(to_remove, false);
195 thread_mask.second.Set(to_add, true);
196 }
197 }
198 }
199}
200
// Adds a newly created JVMTI environment to the set that receives event
// dispatch. Takes the envs list lock for writing.
void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  envs.push_back(env);
}
205
// Removes a JVMTI environment from event dispatch and recomputes the global
// event masks its removal may have changed.
void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  // NOTE(review): an older comment here claimed entries are replaced with
  // nullptr instead of erased; the code now erases the env directly while
  // holding envs_lock_ exclusively, so no concurrent iteration is possible.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    envs.erase(it);
    // The removed env may have been the only one requesting some events, so
    // recompute the union mask for every event type.
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMaskLocked(static_cast<ArtJvmtiEvent>(i));
    }
  }
}
220
// Returns whether |event| may be enabled/disabled for an individual thread.
// The events listed below are only ever delivered globally (presumably
// matching the JVMTI spec's set of events that cannot be filtered by thread
// — confirm against the SetEventNotificationMode documentation).
static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}
237
Alex Light9df79b72017-09-12 08:57:31 -0700238template<typename Type>
Vladimir Markof52d92f2019-03-29 12:33:02 +0000239static Type AddLocalRef(art::JNIEnvExt* e, art::ObjPtr<art::mirror::Object> obj)
Alex Light9df79b72017-09-12 08:57:31 -0700240 REQUIRES_SHARED(art::Locks::mutator_lock_) {
241 return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
242}
243
244template<ArtJvmtiEvent kEvent, typename ...Args>
245static void RunEventCallback(EventHandler* handler,
246 art::Thread* self,
247 art::JNIEnvExt* jnienv,
248 Args... args)
249 REQUIRES_SHARED(art::Locks::mutator_lock_) {
250 ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
251 handler->DispatchEvent<kEvent>(self,
252 static_cast<JNIEnv*>(jnienv),
253 thread_jni.get(),
254 args...);
255}
256
Alex Light8c2b9292017-11-09 13:21:01 -0800257static void SetupDdmTracking(art::DdmCallback* listener, bool enable) {
258 art::ScopedObjectAccess soa(art::Thread::Current());
259 if (enable) {
260 art::Runtime::Current()->GetRuntimeCallbacks()->AddDdmCallback(listener);
261 } else {
262 art::Runtime::Current()->GetRuntimeCallbacks()->RemoveDdmCallback(listener);
263 }
264}
265
// Forwards DDMS chunk publications from the runtime to agents that enabled
// the ART extension event kDdmPublishChunk.
class JvmtiDdmChunkListener : public art::DdmCallback {
 public:
  explicit JvmtiDdmChunkListener(EventHandler* handler) : handler_(handler) {}

  void DdmPublishChunk(uint32_t type, const art::ArrayRef<const uint8_t>& data)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kDdmPublishChunk)) {
      art::Thread* self = art::Thread::Current();
      // Dispatched directly (not via RunEventCallback): the chunk type, size
      // and payload follow the JNIEnv in the extension event's signature.
      handler_->DispatchEvent<ArtJvmtiEvent::kDdmPublishChunk>(
          self,
          static_cast<JNIEnv*>(self->GetJniEnv()),
          static_cast<jint>(type),
          static_cast<jint>(data.size()),
          reinterpret_cast<const jbyte*>(data.data()));
    }
  }

 private:
  EventHandler* handler_;

  DISALLOW_COPY_AND_ASSIGN(JvmtiDdmChunkListener);
};
288
// Forwards heap allocation notifications as JVMTI VMObjectAlloc events.
class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      // Wrap |obj| in a handle so the freshly allocated object stays valid
      // across anything the callback does that might move it.
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
                                                      self,
                                                      jni_env,
                                                      object.get(),
                                                      klass.get(),
                                                      static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};
325
326static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
Andreas Gampec02685c2016-10-17 17:40:27 -0700327 // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
328 // now, do a workaround: (possibly) acquire and release.
329 art::ScopedObjectAccess soa(art::Thread::Current());
330 art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
Andreas Gampe27fa96c2016-10-07 15:05:24 -0700331 if (enable) {
332 art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
333 } else {
334 art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
335 }
336}
337
Alex Light77fee872017-09-05 14:51:49 -0700338class JvmtiMonitorListener : public art::MonitorCallback {
339 public:
340 explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}
341
342 void MonitorContendedLocking(art::Monitor* m)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100343 override REQUIRES_SHARED(art::Locks::mutator_lock_) {
Alex Light77fee872017-09-05 14:51:49 -0700344 if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
345 art::Thread* self = art::Thread::Current();
346 art::JNIEnvExt* jnienv = self->GetJniEnv();
347 ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
348 RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
349 handler_,
350 self,
351 jnienv,
352 mon.get());
353 }
354 }
355
356 void MonitorContendedLocked(art::Monitor* m)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100357 override REQUIRES_SHARED(art::Locks::mutator_lock_) {
Alex Light77fee872017-09-05 14:51:49 -0700358 if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
359 art::Thread* self = art::Thread::Current();
360 art::JNIEnvExt* jnienv = self->GetJniEnv();
361 ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
362 RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
363 handler_,
364 self,
365 jnienv,
366 mon.get());
367 }
368 }
369
370 void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100371 override REQUIRES_SHARED(art::Locks::mutator_lock_) {
Alex Light77fee872017-09-05 14:51:49 -0700372 if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
373 art::Thread* self = art::Thread::Current();
374 art::JNIEnvExt* jnienv = self->GetJniEnv();
375 ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
376 RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
377 handler_,
378 self,
379 jnienv,
380 mon.get(),
381 static_cast<jlong>(timeout));
382 }
383 }
384
385
386 // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
387 // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
388 // never go to sleep (due to not having the lock, having bad arguments, or having an exception
389 // propogated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
390 //
391 // This does not fully match the RI semantics. Specifically, we will not send the
392 // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would, there was an exception in
393 // the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI would
394 // send this event and return without going to sleep.
395 //
396 // See b/65558434 for more discussion.
397 void MonitorWaitFinished(art::Monitor* m, bool timeout)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100398 override REQUIRES_SHARED(art::Locks::mutator_lock_) {
Alex Light77fee872017-09-05 14:51:49 -0700399 if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
400 art::Thread* self = art::Thread::Current();
401 art::JNIEnvExt* jnienv = self->GetJniEnv();
402 ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
403 RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
404 handler_,
405 self,
406 jnienv,
407 mon.get(),
408 static_cast<jboolean>(timeout));
409 }
410 }
411
412 private:
413 EventHandler* handler_;
414};
415
// Translates LockSupport park/unpark callbacks into JVMTI MonitorWait /
// MonitorWaited events, reporting the wait against the thread's parkBlocker
// object (or the thread's own peer when no blocker is set).
class JvmtiParkListener : public art::ParkCallback {
 public:
  explicit JvmtiParkListener(EventHandler* handler) : handler_(handler) {}

  // Reports a MonitorWait event when a thread begins parking. |timeout| is in
  // nanoseconds for relative parks and in milliseconds-since-epoch for
  // absolute parks (is_absolute == true); both are converted to the
  // millisecond wait duration the JVMTI callback expects.
  void ThreadParkStart(bool is_absolute, int64_t timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        // No blocker published; fall back to the thread's own peer object.
        blocker_obj = self->GetPeer();
      }
      int64_t timeout_ms;
      if (!is_absolute) {
        if (timeout == 0) {
          // A relative park of 0 ns parks indefinitely, matching a JVMTI
          // wait timeout of 0.
          timeout_ms = 0;
        } else {
          timeout_ms = timeout / 1000000;
          if (timeout_ms == 0) {
            // If we were instructed to park for a nonzero number of nanoseconds, but not enough
            // to be a full millisecond, round up to 1 ms. A nonzero park() call will return
            // soon, but a 0 wait or park call will wait indefinitely.
            timeout_ms = 1;
          }
        }
      } else {
        // Absolute deadline: convert to a duration from the current wall
        // clock time.
        struct timeval tv;
        gettimeofday(&tv, (struct timezone *) nullptr);
        int64_t now = tv.tv_sec * 1000LL + tv.tv_usec / 1000;
        if (now < timeout) {
          timeout_ms = timeout - now;
        } else {
          // Waiting for 0 ms is an indefinite wait; parking until a time in
          // the past or the current time will return immediately, so emulate
          // the shortest possible wait event.
          timeout_ms = 1;
        }
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jlong>(timeout_ms));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
  // propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would, there was an exception in
  // the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI would
  // send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void ThreadParkFinished(bool timeout) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        blocker_obj = self->GetPeer();
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;
};
502
503static void SetupMonitorListener(art::MonitorCallback* monitor_listener, art::ParkCallback* park_listener, bool enable) {
Alex Light77fee872017-09-05 14:51:49 -0700504 // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
505 // now, do a workaround: (possibly) acquire and release.
506 art::ScopedObjectAccess soa(art::Thread::Current());
507 if (enable) {
Charles Munger5cc0e752018-11-09 12:30:46 -0800508 art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(monitor_listener);
509 art::Runtime::Current()->GetRuntimeCallbacks()->AddParkCallback(park_listener);
Alex Light77fee872017-09-05 14:51:49 -0700510 } else {
Charles Munger5cc0e752018-11-09 12:30:46 -0800511 art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(monitor_listener);
512 art::Runtime::Current()->GetRuntimeCallbacks()->RemoveParkCallback(park_listener);
Alex Light77fee872017-09-05 14:51:49 -0700513 }
514}
515
Andreas Gampe9b8c5882016-10-21 15:27:46 -0700516// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
517class JvmtiGcPauseListener : public art::gc::GcPauseListener {
518 public:
519 explicit JvmtiGcPauseListener(EventHandler* handler)
520 : handler_(handler),
521 start_enabled_(false),
522 finish_enabled_(false) {}
523
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100524 void StartPause() override {
Alex Lightb284f8d2017-11-21 00:00:48 +0000525 handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(art::Thread::Current());
Andreas Gampe9b8c5882016-10-21 15:27:46 -0700526 }
527
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100528 void EndPause() override {
Alex Lightb284f8d2017-11-21 00:00:48 +0000529 handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(art::Thread::Current());
Andreas Gampe9b8c5882016-10-21 15:27:46 -0700530 }
531
532 bool IsEnabled() {
533 return start_enabled_ || finish_enabled_;
534 }
535
536 void SetStartEnabled(bool e) {
537 start_enabled_ = e;
538 }
539
540 void SetFinishEnabled(bool e) {
541 finish_enabled_ = e;
542 }
543
544 private:
545 EventHandler* handler_;
546 bool start_enabled_;
547 bool finish_enabled_;
548};
549
Alex Light40d87f42017-01-18 10:27:06 -0800550static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
Andreas Gampe9b8c5882016-10-21 15:27:46 -0700551 bool old_state = listener->IsEnabled();
552
Alex Light40d87f42017-01-18 10:27:06 -0800553 if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
Andreas Gampe9b8c5882016-10-21 15:27:46 -0700554 listener->SetStartEnabled(enable);
555 } else {
556 listener->SetFinishEnabled(enable);
557 }
558
559 bool new_state = listener->IsEnabled();
560
561 if (old_state != new_state) {
562 if (new_state) {
563 art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
564 } else {
565 art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
566 }
567 }
568}
569
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100570class JvmtiMethodTraceListener final : public art::instrumentation::InstrumentationListener {
Alex Lightb7edcda2017-04-27 13:20:31 -0700571 public:
572 explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}
573
Alex Lightb7edcda2017-04-27 13:20:31 -0700574 // Call-back for when a method is entered.
575 void MethodEntered(art::Thread* self,
576 art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
577 art::ArtMethod* method,
578 uint32_t dex_pc ATTRIBUTE_UNUSED)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100579 REQUIRES_SHARED(art::Locks::mutator_lock_) override {
Alex Lightb7edcda2017-04-27 13:20:31 -0700580 if (!method->IsRuntimeMethod() &&
581 event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
582 art::JNIEnvExt* jnienv = self->GetJniEnv();
Alex Light77fee872017-09-05 14:51:49 -0700583 RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
584 self,
Alex Lightb7edcda2017-04-27 13:20:31 -0700585 jnienv,
586 art::jni::EncodeArtMethod(method));
587 }
588 }
589
  // Callback for when a method is exited with a reference return value.
  // Reported as MethodExit with was_popped_by_exception == false and the
  // returned object (as a local ref) in the jvalue's |l| field.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      // This overload must only be invoked for methods that actually declare
      // a reference return type.
      DCHECK_EQ(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }
616
  // Call-back for when a method is exited with a primitive (or void) return
  // value. Reported as MethodExit with was_popped_by_exception == false.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      // Reference returns must go through the Handle overload above.
      DCHECK_NE(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }
644
  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  // Reported to agents as a MethodExit event with was_popped_by_exception ==
  // true; the jvalue is a -1 placeholder since there is no return value.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // The pending exception must be stashed and cleared while the agent
      // callback runs; it is restored below unless the callback raised a new
      // exception of its own.
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }
675
  // Call-back for when the dex pc moves in a method. Reports SingleStep and
  // Breakpoint events (in that order) for the new location.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    // methods. This could interact with obsolete methods if we ever let interface redefinition
    // happen though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // Step event is reported first according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The Dispatch code will filter the individual
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
    }
  }
701
702 // Call-back for when we read from a field.
Alex Light084fa372017-06-16 08:58:34 -0700703 void FieldRead(art::Thread* self,
704 art::Handle<art::mirror::Object> this_object,
705 art::ArtMethod* method,
706 uint32_t dex_pc,
707 art::ArtField* field)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100708 REQUIRES_SHARED(art::Locks::mutator_lock_) override {
Alex Light084fa372017-06-16 08:58:34 -0700709 if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
710 art::JNIEnvExt* jnienv = self->GetJniEnv();
711 // DCHECK(!self->IsExceptionPending());
712 ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
713 ScopedLocalRef<jobject> fklass(jnienv,
714 AddLocalRef<jobject>(jnienv,
715 field->GetDeclaringClass().Ptr()));
Alex Light77fee872017-09-05 14:51:49 -0700716 RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
717 self,
Alex Light084fa372017-06-16 08:58:34 -0700718 jnienv,
719 art::jni::EncodeArtMethod(method),
720 static_cast<jlocation>(dex_pc),
721 static_cast<jclass>(fklass.get()),
722 this_ref.get(),
723 art::jni::EncodeArtField(field));
724 }
725 }
726
  // Call-back for when an object-typed field is written.
  // Reports the JVMTI FieldModification event with type_char 'L' (reference) and
  // the new value wrapped in a local reference.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      // Local refs keep the receiver, declaring class and new value alive for the callback.
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          // JVMTI requires a NULL object for static-field modification.
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char: JNI signature character for a reference type.
          val);
    }
  }
757
  // Call-back for when we write into a field.
  // Primitive-value overload: reports the JVMTI FieldModification event with the
  // field's JNI type signature character and the raw primitive value.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      // First char of the primitive descriptor ('Z', 'B', 'I', 'J', ...) is the JVMTI type_char.
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),  // nb static field modification get given
                                                         // the class as this_object for some
                                                         // reason.
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }
793
  // Call-back for when a frame with a registered NotifyFramePop watch is popped.
  // Undoes the per-thread force-interpreter bump added when the watch was set and
  // then reports the JVMTI FramePop event.
  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    // Remove the force-interpreter added by the WatchFrame.
    {
      // thread_list_lock_ guards the force-interpreter count.
      art::MutexLock mu(self, *art::Locks::thread_list_lock_);
      CHECK_GT(self->ForceInterpreterCount(), 0u);
      self->DecrementForceInterpreterCount();
    }
    // Capture before the callback runs, since the agent may clear/raise exceptions.
    jboolean is_exception_pending = self->IsExceptionPending();
    RunEventCallback<ArtJvmtiEvent::kFramePop>(
        event_handler_,
        self,
        jnienv,
        art::jni::EncodeArtMethod(frame.GetMethod()),
        is_exception_pending,
        &frame);
  }
812
  // Predicts where |exception| will be caught by walking |self|'s stack.
  // On success *out_method/*dex_pc identify the catching method and handler pc;
  // if no Java-level handler is found they are left as nullptr/0.
  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest java frame with a
    // compatible catch statement.
    class CatchLocationFinder final : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
        : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
          exception_class_(exception_class),
          catch_method_ptr_(out_catch_method),
          catch_dex_pc_ptr_(out_catch_pc) {}

      // Returns true to continue walking, false once a handler has been found.
      bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;   // Out-param: catching method.
      uint32_t* catch_dex_pc_ptr_;          // Out-param: handler dex pc.

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions= */ false);
  }
880
  // Call-back when an exception is thrown.
  // Reports the JVMTI Exception event with the throw location and the predicted
  // catch location (computed by FindCatchMethodsFromThrow).
  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    // The instrumentation events get rid of this for us.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtMethod* catch_method;
      uint32_t catch_pc;
      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
      uint32_t dex_pc = 0;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended= */ true,
                                                      /* abort_on_error= */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kException>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get(),
          art::jni::EncodeArtMethod(catch_method),
          static_cast<jlocation>(catch_pc));
    }
    return;
  }
910
  // Call-back when an exception is handled.
  // Reports the JVMTI ExceptionCatch event at the catch location.
  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    // Since the exception has already been handled there shouldn't be one pending.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      uint32_t dex_pc;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended= */ true,
                                                      /* abort_on_error= */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get());
    }
    return;
  }
934
  // Call-back for when we execute a branch.
  // Intentionally a no-op: no JVMTI event corresponds to branch instructions, but
  // the listener interface still requires an implementation.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    return;
  }
943
 private:
  // The EventHandler that owns this listener; used to check which events are
  // enabled and to dispatch the JVMTI callbacks. Not owned.
  EventHandler* const event_handler_;
};
947
948static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
949 switch (event) {
950 case ArtJvmtiEvent::kMethodEntry:
951 return art::instrumentation::Instrumentation::kMethodEntered;
952 case ArtJvmtiEvent::kMethodExit:
953 return art::instrumentation::Instrumentation::kMethodExited |
954 art::instrumentation::Instrumentation::kMethodUnwind;
Alex Light084fa372017-06-16 08:58:34 -0700955 case ArtJvmtiEvent::kFieldModification:
956 return art::instrumentation::Instrumentation::kFieldWritten;
957 case ArtJvmtiEvent::kFieldAccess:
958 return art::instrumentation::Instrumentation::kFieldRead;
Alex Lighta26e3492017-06-27 17:55:37 -0700959 case ArtJvmtiEvent::kBreakpoint:
960 case ArtJvmtiEvent::kSingleStep:
961 return art::instrumentation::Instrumentation::kDexPcMoved;
Alex Lighte814f9d2017-07-31 16:14:39 -0700962 case ArtJvmtiEvent::kFramePop:
963 return art::instrumentation::Instrumentation::kWatchedFramePop;
Alex Light9fb1ab12017-09-05 09:32:49 -0700964 case ArtJvmtiEvent::kException:
965 return art::instrumentation::Instrumentation::kExceptionThrown;
966 case ArtJvmtiEvent::kExceptionCatch:
967 return art::instrumentation::Instrumentation::kExceptionHandled;
Alex Lightb7edcda2017-04-27 13:20:31 -0700968 default:
969 LOG(FATAL) << "Unknown event ";
Elliott Hughesc1896c92018-11-29 11:33:18 -0800970 UNREACHABLE();
Alex Lightb7edcda2017-04-27 13:20:31 -0700971 }
972}
973
// How much deoptimization an event requires in order to be delivered reliably.
// See GetDeoptRequirement for the per-event mapping.
enum class DeoptRequirement {
  // Limited/no deopt required.
  kLimited,
  // A single thread must be put into interpret only.
  kThread,
  // All methods and all threads deopted.
  kFull,
};
982
983static DeoptRequirement GetDeoptRequirement(ArtJvmtiEvent event, jthread thread) {
Alex Light0fa17862017-10-24 13:43:05 -0700984 switch (event) {
985 case ArtJvmtiEvent::kBreakpoint:
986 case ArtJvmtiEvent::kException:
Alex Light3dacdd62019-03-12 15:45:47 +0000987 return DeoptRequirement::kLimited;
988 // TODO MethodEntry is needed due to inconsistencies between the interpreter and the trampoline
989 // in how to handle exceptions.
Alex Light0fa17862017-10-24 13:43:05 -0700990 case ArtJvmtiEvent::kMethodEntry:
Alex Lightd7da3142018-07-18 15:39:16 +0000991 case ArtJvmtiEvent::kExceptionCatch:
Alex Light3dacdd62019-03-12 15:45:47 +0000992 return DeoptRequirement::kFull;
David Srbeckyd25eb2c2018-07-19 12:17:04 +0000993 case ArtJvmtiEvent::kMethodExit:
Alex Light0fa17862017-10-24 13:43:05 -0700994 case ArtJvmtiEvent::kFieldModification:
995 case ArtJvmtiEvent::kFieldAccess:
996 case ArtJvmtiEvent::kSingleStep:
Nicolas Geoffrayad344b62019-03-09 17:49:52 +0000997 case ArtJvmtiEvent::kFramePop:
Alex Light3dacdd62019-03-12 15:45:47 +0000998 return thread == nullptr ? DeoptRequirement::kFull : DeoptRequirement::kThread;
Alex Light0fa17862017-10-24 13:43:05 -0700999 default:
1000 LOG(FATAL) << "Unexpected event type!";
1001 UNREACHABLE();
1002 }
1003}
1004
// Installs or removes |listener| for the instrumentation events backing |event|,
// first acquiring/releasing whatever deoptimization the event needs.
// |thread| limits per-thread-capable events to one thread (see GetDeoptRequirement).
// Returns OK on success or the error from the DeoptManager; on an enable failure
// the requester registration is rolled back.
jvmtiError EventHandler::SetupTraceListener(JvmtiMethodTraceListener* listener,
                                            ArtJvmtiEvent event,
                                            jthread thread,
                                            bool enable) {
  DeoptRequirement deopt_req = GetDeoptRequirement(event, thread);
  // Make sure we can deopt.
  {
    art::ScopedObjectAccess soa(art::Thread::Current());
    DeoptManager* deopt_manager = DeoptManager::Get();
    jvmtiError err = OK;
    if (enable) {
      deopt_manager->AddDeoptimizationRequester();
      switch (deopt_req) {
        case DeoptRequirement::kFull:
          deopt_manager->AddDeoptimizeAllMethods();
          break;
        case DeoptRequirement::kThread:
          err = deopt_manager->AddDeoptimizeThreadMethods(soa, thread);
          break;
        default:
          break;
      }
      // Roll back the requester registration if the per-thread deopt failed.
      if (err != OK) {
        deopt_manager->RemoveDeoptimizationRequester();
        return err;
      }
    } else {
      switch (deopt_req) {
        case DeoptRequirement::kFull:
          deopt_manager->RemoveDeoptimizeAllMethods();
          break;
        case DeoptRequirement::kThread:
          err = deopt_manager->RemoveDeoptimizeThreadMethods(soa, thread);
          break;
        default:
          break;
      }
      // Always drop the requester registration, even if the removal failed.
      deopt_manager->RemoveDeoptimizationRequester();
      if (err != OK) {
        return err;
      }
    }
  }

  // Add the actual listeners.
  uint32_t new_events = GetInstrumentationEventsFor(event);
  if (new_events == art::instrumentation::Instrumentation::kDexPcMoved) {
    // Need to skip adding the listeners if the event is breakpoint/single-step since those events
    // share the same art-instrumentation underlying event. We need to give them their own deopt
    // request though so the test waits until here.
    DCHECK(event == ArtJvmtiEvent::kBreakpoint || event == ArtJvmtiEvent::kSingleStep);
    ArtJvmtiEvent other = event == ArtJvmtiEvent::kBreakpoint ? ArtJvmtiEvent::kSingleStep
                                                              : ArtJvmtiEvent::kBreakpoint;
    if (IsEventEnabledAnywhere(other)) {
      // The event needs to be kept around/is already enabled by the other jvmti event that uses the
      // same instrumentation event.
      return OK;
    }
  }
  // Suspend everything while (un)registering with the instrumentation framework.
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
  return OK;
}
1074
// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
// the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  // Visits every loaded class and pushes any method whose compiled entrypoint is
  // not async-deoptimizable back to the interpreter entrypoint.
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        override REQUIRES(art::Locks::mutator_lock_) {
      if (!klass->IsLoaded()) {
        // Skip classes that aren't loaded since they might not have fully allocated and initialized
        // their methods. Furthermore since the jvmti-plugin must have been loaded by this point
        // these methods will definitely be using debuggable code.
        return true;
      }
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      return true;
    }

   private:
    art::Runtime* runtime_;  // Not owned.
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}
1110
Alex Light77fee872017-09-05 14:51:49 -07001111bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
1112 std::array<ArtJvmtiEvent, 4> events {
1113 {
1114 ArtJvmtiEvent::kMonitorContendedEnter,
1115 ArtJvmtiEvent::kMonitorContendedEntered,
1116 ArtJvmtiEvent::kMonitorWait,
1117 ArtJvmtiEvent::kMonitorWaited
1118 }
1119 };
1120 for (ArtJvmtiEvent e : events) {
1121 if (e != event && IsEventEnabledAnywhere(e)) {
1122 return true;
1123 }
1124 }
1125 return false;
1126}
1127
// Enables/disables the FramePop trace listener. Disabling is refused (returns OK
// without removing the listener) while any environment still has unsent FramePop
// notifications, since removing it then would lose events or dangle frame pointers.
jvmtiError EventHandler::SetupFramePopTraceListener(jthread thread, bool enable) {
  if (enable) {
    frame_pop_enabled = true;
    return SetupTraceListener(
        method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, thread, enable);
  } else {
    // remove the listener if we have no outstanding frames.
    {
      art::ReaderMutexLock mu(art::Thread::Current(), envs_lock_);
      for (ArtJvmTiEnv *env : envs) {
        // Per-env lock nested inside envs_lock_ guards notify_frames.
        art::ReaderMutexLock event_mu(art::Thread::Current(), env->event_info_mutex_);
        if (!env->notify_frames.empty()) {
          // Leaving FramePop listener since there are unsent FramePop events.
          return OK;
        }
      }
      frame_pop_enabled = false;
    }
    return SetupTraceListener(
        method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, thread, enable);
  }
}
1150
// Handle special work for the given event type, if necessary.
// Installs or removes the runtime listener(s) backing |event|. |thread| is only
// meaningful for the trace-listener events that support per-thread deopt.
// Events with no special setup fall through and return OK.
jvmtiError EventHandler::HandleEventType(ArtJvmtiEvent event, jthread thread, bool enable) {
  switch (event) {
    case ArtJvmtiEvent::kDdmPublishChunk:
      SetupDdmTracking(ddm_listener_.get(), enable);
      return OK;
    case ArtJvmtiEvent::kVmObjectAlloc:
      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
      return OK;
    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
      return OK;
    // FramePop can never be disabled once it's been turned on if it was turned off with outstanding
    // pop-events since we would either need to deal with dangling pointers or have missed events.
    case ArtJvmtiEvent::kFramePop:
      if (enable && frame_pop_enabled) {
        // The frame-pop event was held on by pending events so we don't need to do anything.
        // Falls out of the switch and returns OK below.
        break;
      } else {
        return SetupFramePopTraceListener(thread, enable);
      }
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      return SetupTraceListener(method_trace_listener_.get(), event, thread, enable);
    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      // The monitor/park listeners are shared; only toggle them when no other
      // monitor event still needs them.
      if (!OtherMonitorEventsEnabledAnywhere(event)) {
        SetupMonitorListener(monitor_listener_.get(), park_listener_.get(), enable);
      }
      return OK;
    default:
      break;
  }
  return OK;
}
1195
Alex Light9db679d2017-01-25 15:28:04 -08001196// Checks to see if the env has the capabilities associated with the given event.
1197static bool HasAssociatedCapability(ArtJvmTiEnv* env,
1198 ArtJvmtiEvent event) {
1199 jvmtiCapabilities caps = env->capabilities;
1200 switch (event) {
1201 case ArtJvmtiEvent::kBreakpoint:
1202 return caps.can_generate_breakpoint_events == 1;
1203
1204 case ArtJvmtiEvent::kCompiledMethodLoad:
1205 case ArtJvmtiEvent::kCompiledMethodUnload:
1206 return caps.can_generate_compiled_method_load_events == 1;
1207
1208 case ArtJvmtiEvent::kException:
1209 case ArtJvmtiEvent::kExceptionCatch:
1210 return caps.can_generate_exception_events == 1;
1211
1212 case ArtJvmtiEvent::kFieldAccess:
1213 return caps.can_generate_field_access_events == 1;
1214
1215 case ArtJvmtiEvent::kFieldModification:
1216 return caps.can_generate_field_modification_events == 1;
1217
1218 case ArtJvmtiEvent::kFramePop:
1219 return caps.can_generate_frame_pop_events == 1;
1220
1221 case ArtJvmtiEvent::kGarbageCollectionStart:
1222 case ArtJvmtiEvent::kGarbageCollectionFinish:
1223 return caps.can_generate_garbage_collection_events == 1;
1224
1225 case ArtJvmtiEvent::kMethodEntry:
1226 return caps.can_generate_method_entry_events == 1;
1227
1228 case ArtJvmtiEvent::kMethodExit:
1229 return caps.can_generate_method_exit_events == 1;
1230
1231 case ArtJvmtiEvent::kMonitorContendedEnter:
1232 case ArtJvmtiEvent::kMonitorContendedEntered:
1233 case ArtJvmtiEvent::kMonitorWait:
1234 case ArtJvmtiEvent::kMonitorWaited:
1235 return caps.can_generate_monitor_events == 1;
1236
1237 case ArtJvmtiEvent::kNativeMethodBind:
1238 return caps.can_generate_native_method_bind_events == 1;
1239
1240 case ArtJvmtiEvent::kObjectFree:
1241 return caps.can_generate_object_free_events == 1;
1242
1243 case ArtJvmtiEvent::kSingleStep:
1244 return caps.can_generate_single_step_events == 1;
1245
1246 case ArtJvmtiEvent::kVmObjectAlloc:
1247 return caps.can_generate_vm_object_alloc_events == 1;
1248
1249 default:
1250 return true;
1251 }
1252}
1253
// Implements jvmtiEnv::SetEventNotificationMode for |env|.
// Validates |mode|, |event| and (for thread-scoped requests) |thread|, atomically
// updates the per-env and global event masks, and performs any listener setup
// when the event's global enabled state actually changes.
jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  jthread thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  art::Thread* art_thread = nullptr;
  if (thread != nullptr) {
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
    art::ScopedObjectAccess soa(art::Thread::Current());
    // thread_list_lock_ keeps the resolved thread alive while we inspect it.
    art::MutexLock mu(soa.Self(), *art::Locks::thread_list_lock_);
    jvmtiError err = ERR(INTERNAL);
    if (!ThreadUtil::GetAliveNativeThread(thread, soa, &art_thread, &err)) {
      return err;
    } else if (art_thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    art::ThreadState state = art_thread->GetState();
    if (state == art::ThreadState::kStarting || state == art::ThreadState::kTerminated) {
      return ERR(THREAD_NOT_ALIVE);
    }
  }

  // TODO We use art_thread simply as a global unique identifier here. It is not safe to actually
  // use it without holding the thread_list_lock_.

  bool old_state;
  bool new_state;

  {
    // Change the event masks atomically.
    art::Thread* self = art::Thread::Current();
    art::WriterMutexLock mu(self, envs_lock_);
    art::WriterMutexLock mu_env_info(self, env->event_info_mutex_);
    old_state = global_mask.Test(event);
    if (mode == JVMTI_ENABLE) {
      env->event_masks.EnableEvent(env, art_thread, event);
      global_mask.Set(event);
      new_state = true;
    } else {
      DCHECK_EQ(mode, JVMTI_DISABLE);

      // TODO Replace art_thread with a uintptr_t or something to indicate we cannot read from it.
      env->event_masks.DisableEvent(env, art_thread, event);
      // The event may still be enabled in another env/thread mask.
      RecalculateGlobalEventMaskLocked(event);
      new_state = global_mask.Test(event);
    }
  }

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    return HandleEventType(event, thread, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}
1322
Alex Light0fa17862017-10-24 13:43:05 -07001323void EventHandler::HandleBreakpointEventsChanged(bool added) {
1324 if (added) {
1325 DeoptManager::Get()->AddDeoptimizationRequester();
1326 } else {
1327 DeoptManager::Get()->RemoveDeoptimizationRequester();
1328 }
1329}
1330
// Tears down the instrumentation hookup at runtime shutdown.
void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  // GC critical section + suspend-all: listener removal must not race a GC or
  // running mutators.
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}
1341
// Constructs the EventHandler and eagerly allocates every runtime listener.
// The listeners are created up front but are only registered with the runtime
// when their corresponding events are enabled (see HandleEventType).
EventHandler::EventHandler()
  : envs_lock_("JVMTI Environment List Lock", art::LockLevel::kTopLockLevel),
    frame_pop_enabled(false) {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  ddm_listener_.reset(new JvmtiDdmChunkListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
  monitor_listener_.reset(new JvmtiMonitorListener(this));
  park_listener_.reset(new JvmtiParkListener(this));
}
1352
1353EventHandler::~EventHandler() {
1354}
1355
Andreas Gampe77708d92016-10-07 11:48:21 -07001356} // namespace openjdkjvmti