blob: 572586eefccddaf9b416aaab93846709c6b88f8e [file] [log] [blame]
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_INSTRUMENTATION_H_
18#define ART_RUNTIME_INSTRUMENTATION_H_
jeffhao725a9572012-11-13 18:20:12 -080019
Mythri Alle72be14e2021-11-01 11:48:06 +000020#include <stdint.h>
Mythri Alle5097f832021-11-02 14:52:30 +000021
22#include <functional>
Ian Rogers576ca0c2014-06-06 15:58:22 -070023#include <list>
Andreas Gampe7e56a072018-11-29 10:40:06 -080024#include <memory>
Mythri Alle72be14e2021-11-01 11:48:06 +000025#include <optional>
Mythri Alle5097f832021-11-02 14:52:30 +000026#include <unordered_set>
Ian Rogers576ca0c2014-06-06 15:58:22 -070027
Ian Rogersd582fa42014-11-05 23:46:43 -080028#include "arch/instruction_set.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070029#include "base/enums.h"
Andreas Gampe7e56a072018-11-29 10:40:06 -080030#include "base/locks.h"
Elliott Hughes76160052012-12-12 16:31:20 -080031#include "base/macros.h"
David Sehr67bf42e2018-02-26 16:43:04 -080032#include "base/safe_map.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070033#include "gc_root.h"
Mythri Alle5097f832021-11-02 14:52:30 +000034#include "offsets.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080035
jeffhao725a9572012-11-13 18:20:12 -080036namespace art {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080037namespace mirror {
Igor Murashkin2ffb7032017-11-08 13:35:21 -080038class Class;
39class Object;
40class Throwable;
Ian Rogers62d6c772013-02-27 08:32:07 -080041} // namespace mirror
Mathieu Chartierc7853442015-03-27 14:35:38 -070042class ArtField;
Mathieu Chartiere401d142015-04-22 13:56:20 -070043class ArtMethod;
Alex Lightd7661582017-05-01 13:48:16 -070044template <typename T> class Handle;
Alex Light2c8206f2018-06-08 14:51:09 -070045template <typename T> class MutableHandle;
Mythri Alle5097f832021-11-02 14:52:30 +000046struct NthCallerVisitor;
Ian Rogers62d6c772013-02-27 08:32:07 -080047union JValue;
Andreas Gampe7e56a072018-11-29 10:40:06 -080048class SHARED_LOCKABLE ReaderWriterMutex;
Alex Lighte814f9d2017-07-31 16:14:39 -070049class ShadowFrame;
jeffhao725a9572012-11-13 18:20:12 -080050class Thread;
Mingyao Yang2ee17902017-08-30 11:37:08 -070051enum class DeoptimizationMethodType;
jeffhao725a9572012-11-13 18:20:12 -080052
Ian Rogers62d6c772013-02-27 08:32:07 -080053namespace instrumentation {
jeffhao725a9572012-11-13 18:20:12 -080054
Sebastien Hertzee1997a2013-09-19 14:47:09 +020055
Andreas Gampe40da2862015-02-27 12:49:04 -080056// Do we want to deoptimize for method entry and exit listeners or just try to intercept
57// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
58// application's performance.
59static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
60
Alex Lightb7c640d2019-03-20 15:52:13 -070061// an optional frame is either Some(const ShadowFrame& current_frame) or None depending on if the
62// method being exited has a shadow-frame associed with the current stack frame. In cases where
63// there is no shadow-frame associated with this stack frame this will be None.
64using OptionalFrame = std::optional<std::reference_wrapper<const ShadowFrame>>;
65
Ian Rogers62d6c772013-02-27 08:32:07 -080066// Instrumentation event listener API. Registered listeners will get the appropriate call back for
67// the events they are listening for. The call backs supply the thread, method and dex_pc the event
68// occurred upon. The thread may or may not be Thread::Current().
69struct InstrumentationListener {
70 InstrumentationListener() {}
71 virtual ~InstrumentationListener() {}
72
73 // Call-back for when a method is entered.
Mythri Alle9cc65df2021-09-21 15:09:58 +000074 virtual void MethodEntered(Thread* thread, ArtMethod* method)
75 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080076
Alex Lightd7661582017-05-01 13:48:16 -070077 virtual void MethodExited(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -070078 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -070079 OptionalFrame frame,
80 MutableHandle<mirror::Object>& return_value)
Alex Lightd7661582017-05-01 13:48:16 -070081 REQUIRES_SHARED(Locks::mutator_lock_);
82
83 // Call-back for when a method is exited. The implementor should either handler-ize the return
84 // value (if appropriate) or use the alternate MethodExited callback instead if they need to
85 // go through a suspend point.
86 virtual void MethodExited(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -070087 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -070088 OptionalFrame frame,
89 JValue& return_value)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070090 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080091
92 // Call-back for when a method is popped due to an exception throw. A method will either cause a
93 // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
Alex Lightd7661582017-05-01 13:48:16 -070094 virtual void MethodUnwind(Thread* thread,
95 Handle<mirror::Object> this_object,
96 ArtMethod* method,
97 uint32_t dex_pc)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070098 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080099
100 // Call-back for when the dex pc moves in a method.
Alex Lightd7661582017-05-01 13:48:16 -0700101 virtual void DexPcMoved(Thread* thread,
102 Handle<mirror::Object> this_object,
103 ArtMethod* method,
104 uint32_t new_dex_pc)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700105 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800106
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200107 // Call-back for when we read from a field.
Alex Lightd7661582017-05-01 13:48:16 -0700108 virtual void FieldRead(Thread* thread,
109 Handle<mirror::Object> this_object,
110 ArtMethod* method,
111 uint32_t dex_pc,
112 ArtField* field) = 0;
113
114 virtual void FieldWritten(Thread* thread,
115 Handle<mirror::Object> this_object,
116 ArtMethod* method,
117 uint32_t dex_pc,
118 ArtField* field,
119 Handle<mirror::Object> field_value)
120 REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200121
122 // Call-back for when we write into a field.
Alex Lightd7661582017-05-01 13:48:16 -0700123 virtual void FieldWritten(Thread* thread,
124 Handle<mirror::Object> this_object,
125 ArtMethod* method,
126 uint32_t dex_pc,
127 ArtField* field,
128 const JValue& field_value)
129 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200130
Alex Light6e1607e2017-08-23 10:06:18 -0700131 // Call-back when an exception is thrown.
132 virtual void ExceptionThrown(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -0700133 Handle<mirror::Throwable> exception_object)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700134 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800135
Alex Light9fb1ab12017-09-05 09:32:49 -0700136 // Call-back when an exception is caught/handled by java code.
137 virtual void ExceptionHandled(Thread* thread, Handle<mirror::Throwable> exception_object)
138 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
139
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000140 // Call-back for when we execute a branch.
141 virtual void Branch(Thread* thread,
142 ArtMethod* method,
143 uint32_t dex_pc,
144 int32_t dex_pc_offset)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700145 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100146
Alex Lighte814f9d2017-07-31 16:14:39 -0700147 // Call-back when a shadow_frame with the needs_notify_pop_ boolean set is popped off the stack by
148 // either return or exceptions. Normally instrumentation listeners should ensure that there are
149 // shadow-frames by deoptimizing stacks.
150 virtual void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED,
151 const ShadowFrame& frame ATTRIBUTE_UNUSED)
Alex Light05f47742017-09-14 00:34:44 +0000152 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
jeffhao725a9572012-11-13 18:20:12 -0800153};
154
Alex Light2c8206f2018-06-08 14:51:09 -0700155class Instrumentation;
156// A helper to send instrumentation events while popping the stack in a safe way.
157class InstrumentationStackPopper {
158 public:
159 explicit InstrumentationStackPopper(Thread* self);
160 ~InstrumentationStackPopper() REQUIRES_SHARED(Locks::mutator_lock_);
161
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000162 // Increase the number of frames being popped up to `stack_pointer`. Return true if the
163 // frames were popped without any exceptions, false otherwise. The exception that caused
164 // the pop is 'exception'.
165 bool PopFramesTo(uintptr_t stack_pointer, /*in-out*/MutableHandle<mirror::Throwable>& exception)
Alex Light2c8206f2018-06-08 14:51:09 -0700166 REQUIRES_SHARED(Locks::mutator_lock_);
167
168 private:
169 Thread* self_;
170 Instrumentation* instrumentation_;
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000171 // The stack pointer limit for frames to pop.
172 uintptr_t pop_until_;
Alex Light2c8206f2018-06-08 14:51:09 -0700173};
174
Ian Rogers62d6c772013-02-27 08:32:07 -0800175// Instrumentation is a catch-all for when extra information is required from the runtime. The
176// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
177// to method entry and exit, it may also force execution to be switched to the interpreter and
178// trigger deoptimization.
jeffhao725a9572012-11-13 18:20:12 -0800179class Instrumentation {
180 public:
Ian Rogers62d6c772013-02-27 08:32:07 -0800181 enum InstrumentationEvent {
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800182 kMethodEntered = 0x1,
183 kMethodExited = 0x2,
184 kMethodUnwind = 0x4,
185 kDexPcMoved = 0x8,
186 kFieldRead = 0x10,
187 kFieldWritten = 0x20,
Alex Light6e1607e2017-08-23 10:06:18 -0700188 kExceptionThrown = 0x40,
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000189 kBranch = 0x80,
Alex Lighte814f9d2017-07-31 16:14:39 -0700190 kWatchedFramePop = 0x200,
Alex Light9fb1ab12017-09-05 09:32:49 -0700191 kExceptionHandled = 0x400,
Ian Rogers62d6c772013-02-27 08:32:07 -0800192 };
jeffhao725a9572012-11-13 18:20:12 -0800193
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200194 enum class InstrumentationLevel {
195 kInstrumentNothing, // execute without instrumentation
196 kInstrumentWithInstrumentationStubs, // execute with instrumentation entry/exit stubs
197 kInstrumentWithInterpreter // execute with interpreter
198 };
199
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700200 Instrumentation();
jeffhao725a9572012-11-13 18:20:12 -0800201
Mythri Alle5097f832021-11-02 14:52:30 +0000202 static constexpr MemberOffset NeedsEntryExitHooksOffset() {
Mythri Alle9575c122021-11-12 12:04:41 +0000203 // Assert that instrumentation_stubs_installed_ is 8bits wide. If the size changes
204 // update the compare instructions in the code generator when generating checks for
205 // MethodEntryExitHooks.
206 static_assert(sizeof(instrumentation_stubs_installed_) == 1,
207 "instrumentation_stubs_installed_ isn't expected size");
Mythri Alle5097f832021-11-02 14:52:30 +0000208 return MemberOffset(OFFSETOF_MEMBER(Instrumentation, instrumentation_stubs_installed_));
209 }
210
Ian Rogers62d6c772013-02-27 08:32:07 -0800211 // Add a listener to be notified of the masked together sent of instrumentation events. This
212 // suspend the runtime to install stubs. You are expected to hold the mutator lock as a proxy
213 // for saying you should have suspended all threads (installing stubs while threads are running
214 // will break).
215 void AddListener(InstrumentationListener* listener, uint32_t events)
Mathieu Chartier90443472015-07-16 20:32:27 -0700216 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800217
Ian Rogers62d6c772013-02-27 08:32:07 -0800218 // Removes a listener possibly removing instrumentation stubs.
219 void RemoveListener(InstrumentationListener* listener, uint32_t events)
Mathieu Chartier90443472015-07-16 20:32:27 -0700220 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800221
Mathieu Chartieraa516822015-10-02 15:53:37 -0700222 // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200223 void DisableDeoptimization(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700224 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800225 REQUIRES(!GetDeoptimizedMethodsLock());
Mathieu Chartieraa516822015-10-02 15:53:37 -0700226
Sebastien Hertza76a6d42014-03-20 16:40:17 +0100227 bool AreAllMethodsDeoptimized() const {
Mythri Alle9575c122021-11-12 12:04:41 +0000228 return InterpreterStubsInstalled();
Sebastien Hertza76a6d42014-03-20 16:40:17 +0100229 }
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700230 bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100231
232 // Executes everything with interpreter.
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200233 void DeoptimizeEverything(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700234 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
235 REQUIRES(!Locks::thread_list_lock_,
236 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800237 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100238
Mathieu Chartieraa516822015-10-02 15:53:37 -0700239 // Executes everything with compiled code (or interpreter if there is no code). May visit class
240 // linker classes through ConfigureStubs.
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200241 void UndeoptimizeEverything(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700242 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
243 REQUIRES(!Locks::thread_list_lock_,
244 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800245 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100246
247 // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
248 // method (except a class initializer) set to the resolution trampoline will be deoptimized only
249 // once its declaring class is initialized.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700250 void Deoptimize(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800251 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100252
253 // Undeoptimze the method by restoring its entrypoints. Nevertheless, a static method
254 // (except a class initializer) set to the resolution trampoline will be updated only once its
255 // declaring class is initialized.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700256 void Undeoptimize(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800257 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100258
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200259 // Indicates whether the method has been deoptimized so it is executed with the interpreter.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700260 bool IsDeoptimized(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800261 REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100262
Mythri Alleab474882022-01-17 16:43:04 +0000263 // Indicates if any method needs to be deoptimized. This is used to avoid walking the stack to
264 // determine if a deoptimization is required.
265 bool IsDeoptimizedMethodsEmpty() const
266 REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);
267
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200268 // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
269 void EnableMethodTracing(const char* key,
270 bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700271 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
272 REQUIRES(!Locks::thread_list_lock_,
273 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800274 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100275
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200276 // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
277 void DisableMethodTracing(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700278 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
279 REQUIRES(!Locks::thread_list_lock_,
280 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800281 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100282
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200283
Mathieu Chartier90443472015-07-16 20:32:27 -0700284 void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
285 void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700286 void InstrumentQuickAllocEntryPointsLocked()
Mathieu Chartier90443472015-07-16 20:32:27 -0700287 REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
288 !Locks::runtime_shutdown_lock_);
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700289 void UninstrumentQuickAllocEntryPointsLocked()
Mathieu Chartier90443472015-07-16 20:32:27 -0700290 REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
291 !Locks::runtime_shutdown_lock_);
292 void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);
Ian Rogersfa824272013-11-05 16:12:57 -0800293
Nicolas Geoffray854af032021-12-21 08:32:42 +0000294 // Returns a string representation of the given entry point.
295 static std::string EntryPointString(const void* code);
296
297 // Initialize the entrypoint of the method .`aot_code` is the AOT code.
298 void InitializeMethodsCode(ArtMethod* method, const void* aot_code)
299 REQUIRES_SHARED(Locks::mutator_lock_);
300
Ian Rogers62d6c772013-02-27 08:32:07 -0800301 // Update the code of a method respecting any installed stubs.
Nicolas Geoffray854af032021-12-21 08:32:42 +0000302 void UpdateMethodsCode(ArtMethod* method, const void* new_code)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800303 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Ian Rogers62d6c772013-02-27 08:32:07 -0800304
Nicolas Geoffraya6e0e7d2018-01-26 13:16:50 +0000305 // Update the code of a native method to a JITed stub.
Nicolas Geoffray854af032021-12-21 08:32:42 +0000306 void UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800307 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Nicolas Geoffraya6e0e7d2018-01-26 13:16:50 +0000308
Alex Light2d441b12018-06-08 15:33:21 -0700309 // Return the code that we can execute for an invoke including from the JIT.
Nicolas Geoffrayc25a9f92021-12-13 17:22:43 +0000310 const void* GetCodeForInvoke(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800311
312 void ForceInterpretOnly() {
Ian Rogers62d6c772013-02-27 08:32:07 -0800313 forced_interpret_only_ = true;
314 }
315
Mythri Alle9575c122021-11-12 12:04:41 +0000316 bool EntryExitStubsInstalled() const {
317 return instrumentation_level_ == InstrumentationLevel::kInstrumentWithInstrumentationStubs ||
318 instrumentation_level_ == InstrumentationLevel::kInstrumentWithInterpreter;
319 }
320
321 bool InterpreterStubsInstalled() const {
322 return instrumentation_level_ == InstrumentationLevel::kInstrumentWithInterpreter;
323 }
324
Brian Carlstromea46f952013-07-30 01:26:50 -0700325 // Called by ArtMethod::Invoke to determine dispatch mechanism.
Ian Rogers62d6c772013-02-27 08:32:07 -0800326 bool InterpretOnly() const {
Mythri Alle9575c122021-11-12 12:04:41 +0000327 return forced_interpret_only_ || InterpreterStubsInstalled();
Ian Rogers62d6c772013-02-27 08:32:07 -0800328 }
Nicolas Geoffrayc25a9f92021-12-13 17:22:43 +0000329 bool InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800330
Hiroshi Yamauchi563b47c2014-02-28 17:18:37 -0800331 bool IsForcedInterpretOnly() const {
332 return forced_interpret_only_;
333 }
334
Ian Rogers62d6c772013-02-27 08:32:07 -0800335 bool AreExitStubsInstalled() const {
336 return instrumentation_stubs_installed_;
337 }
338
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700339 bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200340 return have_method_entry_listeners_;
341 }
342
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700343 bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200344 return have_method_exit_listeners_;
345 }
346
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700347 bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200348 return have_method_unwind_listeners_;
349 }
350
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700351 bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200352 return have_dex_pc_listeners_;
353 }
354
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700355 bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200356 return have_field_read_listeners_;
357 }
358
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700359 bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200360 return have_field_write_listeners_;
361 }
362
Alex Light6e1607e2017-08-23 10:06:18 -0700363 bool HasExceptionThrownListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
364 return have_exception_thrown_listeners_;
Sebastien Hertz9f102032014-05-23 08:59:42 +0200365 }
366
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700367 bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000368 return have_branch_listeners_;
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800369 }
370
Alex Lighte814f9d2017-07-31 16:14:39 -0700371 bool HasWatchedFramePopListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
372 return have_watched_frame_pop_listeners_;
373 }
374
Alex Light9fb1ab12017-09-05 09:32:49 -0700375 bool HasExceptionHandledListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
376 return have_exception_handled_listeners_;
377 }
378
Mythri Alleab474882022-01-17 16:43:04 +0000379 bool NeedsSlowInterpreterForListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
380 return have_field_read_listeners_ ||
381 have_field_write_listeners_ ||
382 have_watched_frame_pop_listeners_ ||
383 have_exception_handled_listeners_;
Bill Buzbeefd522f92016-02-11 22:37:42 +0000384 }
385
Ian Rogers62d6c772013-02-27 08:32:07 -0800386 // Inform listeners that a method has been entered. A dex PC is provided as we may install
387 // listeners into executing code and get method enter events for methods already on the stack.
Mythri Alle9cc65df2021-09-21 15:09:58 +0000388 void MethodEnterEvent(Thread* thread, ArtMethod* method) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700389 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200390 if (UNLIKELY(HasMethodEntryListeners())) {
Mythri Alle9cc65df2021-09-21 15:09:58 +0000391 MethodEnterEventImpl(thread, method);
Ian Rogers62d6c772013-02-27 08:32:07 -0800392 }
393 }
394
395 // Inform listeners that a method has been exited.
Alex Lightb7c640d2019-03-20 15:52:13 -0700396 template<typename T>
Alex Lightd7661582017-05-01 13:48:16 -0700397 void MethodExitEvent(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -0700398 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -0700399 OptionalFrame frame,
400 T& return_value) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700401 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200402 if (UNLIKELY(HasMethodExitListeners())) {
Mythri Alle18fba4c2021-10-27 10:00:55 +0000403 MethodExitEventImpl(thread, method, frame, return_value);
Ian Rogers62d6c772013-02-27 08:32:07 -0800404 }
405 }
406
407 // Inform listeners that a method has been exited due to an exception.
Vladimir Marko19711d42019-04-12 14:05:34 +0100408 void MethodUnwindEvent(Thread* thread,
409 ObjPtr<mirror::Object> this_object,
410 ArtMethod* method,
411 uint32_t dex_pc) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700412 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800413
414 // Inform listeners that the dex pc has moved (only supported by the interpreter).
Vladimir Marko19711d42019-04-12 14:05:34 +0100415 void DexPcMovedEvent(Thread* thread,
416 ObjPtr<mirror::Object> this_object,
417 ArtMethod* method,
418 uint32_t dex_pc) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700419 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200420 if (UNLIKELY(HasDexPcListeners())) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800421 DexPcMovedEventImpl(thread, this_object, method, dex_pc);
422 }
423 }
424
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000425 // Inform listeners that a branch has been taken (only supported by the interpreter).
426 void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700427 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000428 if (UNLIKELY(HasBranchListeners())) {
429 BranchImpl(thread, method, dex_pc, offset);
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800430 }
431 }
432
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200433 // Inform listeners that we read a field (only supported by the interpreter).
Vladimir Marko19711d42019-04-12 14:05:34 +0100434 void FieldReadEvent(Thread* thread,
435 ObjPtr<mirror::Object> this_object,
436 ArtMethod* method,
437 uint32_t dex_pc,
Mathieu Chartierc7853442015-03-27 14:35:38 -0700438 ArtField* field) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700439 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200440 if (UNLIKELY(HasFieldReadListeners())) {
441 FieldReadEventImpl(thread, this_object, method, dex_pc, field);
442 }
443 }
444
445 // Inform listeners that we write a field (only supported by the interpreter).
Vladimir Marko19711d42019-04-12 14:05:34 +0100446 void FieldWriteEvent(Thread* thread,
447 ObjPtr<mirror::Object> this_object,
448 ArtMethod* method,
449 uint32_t dex_pc,
450 ArtField* field,
451 const JValue& field_value) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700452 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200453 if (UNLIKELY(HasFieldWriteListeners())) {
454 FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
455 }
456 }
457
Alex Lighte814f9d2017-07-31 16:14:39 -0700458 // Inform listeners that a branch has been taken (only supported by the interpreter).
459 void WatchedFramePopped(Thread* thread, const ShadowFrame& frame) const
460 REQUIRES_SHARED(Locks::mutator_lock_) {
461 if (UNLIKELY(HasWatchedFramePopListeners())) {
462 WatchedFramePopImpl(thread, frame);
463 }
464 }
465
Alex Light6e1607e2017-08-23 10:06:18 -0700466 // Inform listeners that an exception was thrown.
Vladimir Marko19711d42019-04-12 14:05:34 +0100467 void ExceptionThrownEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700468 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800469
Alex Light9fb1ab12017-09-05 09:32:49 -0700470 // Inform listeners that an exception has been handled. This is not sent for native code or for
471 // exceptions which reach the end of the thread's stack.
Vladimir Marko19711d42019-04-12 14:05:34 +0100472 void ExceptionHandledEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
Alex Light9fb1ab12017-09-05 09:32:49 -0700473 REQUIRES_SHARED(Locks::mutator_lock_);
474
Mythri Alle5097f832021-11-02 14:52:30 +0000475 JValue GetReturnValue(Thread* self,
476 ArtMethod* method,
477 bool* is_ref,
478 uint64_t* gpr_result,
479 uint64_t* fpr_result) REQUIRES_SHARED(Locks::mutator_lock_);
480 bool ShouldDeoptimizeMethod(Thread* self, const NthCallerVisitor& visitor)
481 REQUIRES_SHARED(Locks::mutator_lock_);
482
Ian Rogers62d6c772013-02-27 08:32:07 -0800483 // Called when an instrumented method is entered. The intended link register (lr) is saved so
484 // that returning causes a branch to the method exit stub. Generates method enter events.
Vladimir Marko19711d42019-04-12 14:05:34 +0100485 void PushInstrumentationStackFrame(Thread* self,
486 ObjPtr<mirror::Object> this_object,
487 ArtMethod* method,
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000488 uintptr_t stack_pointer,
Vladimir Marko19711d42019-04-12 14:05:34 +0100489 uintptr_t lr,
Jeff Hao9a916d32013-06-27 18:45:37 -0700490 bool interpreter_entry)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700491 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800492
Mingyao Yang2ee17902017-08-30 11:37:08 -0700493 DeoptimizationMethodType GetDeoptimizationMethodType(ArtMethod* method)
494 REQUIRES_SHARED(Locks::mutator_lock_);
495
Ian Rogers62d6c772013-02-27 08:32:07 -0800496 // Called when an instrumented method is exited. Removes the pushed instrumentation frame
Alex Lightb7edcda2017-04-27 13:20:31 -0700497 // returning the intended link register. Generates method exit events. The gpr_result and
498 // fpr_result pointers are pointers to the locations where the integer/pointer and floating point
499 // result values of the function are stored. Both pointers must always be valid but the values
500 // held there will only be meaningful if interpreted as the appropriate type given the function
501 // being returned from.
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000502 TwoWordReturn PopInstrumentationStackFrame(Thread* self,
503 uintptr_t* return_pc_addr,
504 uint64_t* gpr_result,
505 uint64_t* fpr_result)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800506 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Ian Rogers62d6c772013-02-27 08:32:07 -0800507
Alex Light2c8206f2018-06-08 14:51:09 -0700508 // Pops nframes instrumentation frames from the current thread. Returns the return pc for the last
509 // instrumentation frame that's popped.
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000510 uintptr_t PopFramesForDeoptimization(Thread* self, uintptr_t stack_pointer) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700511 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800512
513 // Call back for configure stubs.
Vladimir Marko19711d42019-04-12 14:05:34 +0100514 void InstallStubsForClass(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800515 REQUIRES(!GetDeoptimizedMethodsLock());
jeffhao725a9572012-11-13 18:20:12 -0800516
Mathieu Chartiere401d142015-04-22 13:56:20 -0700517 void InstallStubsForMethod(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800518 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100519
Alex Light40607862019-05-06 18:16:24 +0000520 // Sets up instrumentation to allow single thread deoptimization using ForceInterpreterCount.
Nicolas Geoffraye91532e2021-12-10 09:52:18 +0000521 void EnableSingleThreadDeopt(const char* key)
Alex Light40607862019-05-06 18:16:24 +0000522 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
523 REQUIRES(!Locks::thread_list_lock_,
524 !Locks::classlinker_classes_lock_,
525 !GetDeoptimizedMethodsLock());
526
Mingyao Yang99170c62015-07-06 11:10:37 -0700527 // Install instrumentation exit stub on every method of the stack of the given thread.
Mythri Alle5097f832021-11-02 14:52:30 +0000528 // This is used by:
  // - the debugger to cause a deoptimization of all the frames in the thread's stack (for
  //   example, after updating local variables)
531 // - to call method entry / exit hooks for tracing. For this we instrument
532 // the stack frame to run entry / exit hooks but we don't need to deoptimize.
533 // deopt_all_frames indicates whether the frames need to deoptimize or not.
534 void InstrumentThreadStack(Thread* thread, bool deopt_all_frames) REQUIRES(Locks::mutator_lock_);
Mingyao Yang99170c62015-07-06 11:10:37 -0700535
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +0000536 // Force all currently running frames to be deoptimized back to interpreter. This should only be
537 // used in cases where basically all compiled code has been invalidated.
538 void DeoptimizeAllThreadFrames() REQUIRES(art::Locks::mutator_lock_);
539
Sebastien Hertzb2feaaf2015-10-12 13:40:10 +0000540 static size_t ComputeFrameId(Thread* self,
541 size_t frame_depth,
542 size_t inlined_frames_before_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700543 REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertzb2feaaf2015-10-12 13:40:10 +0000544
  // Returns whether the allocation entrypoints are currently instrumented.
  // Deliberately does not hold any lock: the GC uses this at suspend points to
  // detect a not-instrumented -> instrumented transition. This plain read is
  // safe because alloc_entrypoints_instrumented_ is only updated with all
  // threads suspended (see the comment on the field below).
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }
550
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200551 InstrumentationLevel GetCurrentInstrumentationLevel() const;
552
Alex Lightdba61482016-12-21 08:20:29 -0800553 private:
554 // Returns true if moving to the given instrumentation level requires the installation of stubs.
555 // False otherwise.
556 bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;
557
  // Returns true if we need the entry / exit stub to call entry hooks. JITed code
  // directly calls the entry / exit hooks and doesn't need the stub.
Nicolas Geoffrayc8a694d2022-01-17 17:12:38 +0000560 static bool CodeNeedsEntryExitStub(const void* code, ArtMethod* method)
561 REQUIRES_SHARED(Locks::mutator_lock_);
Mythri Alle5097f832021-11-02 14:52:30 +0000562
Mythri Alle519ff8b2021-11-17 13:47:07 +0000563 // Update the current instrumentation_level_.
564 void UpdateInstrumentationLevel(InstrumentationLevel level);
Mythri Alle9575c122021-11-12 12:04:41 +0000565
Ian Rogers62d6c772013-02-27 08:32:07 -0800566 // Does the job of installing or removing instrumentation code within methods.
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200567 // In order to support multiple clients using instrumentation at the same time,
568 // the caller must pass a unique key (a string) identifying it so we remind which
569 // instrumentation level it needs. Therefore the current instrumentation level
570 // becomes the highest instrumentation level required by a client.
571 void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700572 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800573 REQUIRES(!GetDeoptimizedMethodsLock(),
Mathieu Chartieraa516822015-10-02 15:53:37 -0700574 !Locks::thread_list_lock_,
Mathieu Chartier90443472015-07-16 20:32:27 -0700575 !Locks::classlinker_classes_lock_);
Alex Light40607862019-05-06 18:16:24 +0000576 void UpdateStubs() REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
577 REQUIRES(!GetDeoptimizedMethodsLock(),
578 !Locks::thread_list_lock_,
579 !Locks::classlinker_classes_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800580
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200581
Mathieu Chartier661974a2014-01-09 11:23:53 -0800582 // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
583 // exclusive access to mutator lock which you can't get if the runtime isn't started.
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700584 void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;
Mathieu Chartier661974a2014-01-09 11:23:53 -0800585
Mythri Alle9cc65df2021-09-21 15:09:58 +0000586 void MethodEnterEventImpl(Thread* thread, ArtMethod* method) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700587 REQUIRES_SHARED(Locks::mutator_lock_);
Alex Lightb7c640d2019-03-20 15:52:13 -0700588 template <typename T>
Alex Lightd7661582017-05-01 13:48:16 -0700589 void MethodExitEventImpl(Thread* thread,
Mathieu Chartiere401d142015-04-22 13:56:20 -0700590 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -0700591 OptionalFrame frame,
592 T& return_value) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700593 REQUIRES_SHARED(Locks::mutator_lock_);
Alex Lightd7661582017-05-01 13:48:16 -0700594 void DexPcMovedEventImpl(Thread* thread,
595 ObjPtr<mirror::Object> this_object,
596 ArtMethod* method,
597 uint32_t dex_pc) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700598 REQUIRES_SHARED(Locks::mutator_lock_);
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000599 void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700600 REQUIRES_SHARED(Locks::mutator_lock_);
Alex Lighte814f9d2017-07-31 16:14:39 -0700601 void WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const
602 REQUIRES_SHARED(Locks::mutator_lock_);
Alex Lightd7661582017-05-01 13:48:16 -0700603 void FieldReadEventImpl(Thread* thread,
604 ObjPtr<mirror::Object> this_object,
605 ArtMethod* method,
606 uint32_t dex_pc,
607 ArtField* field) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700608 REQUIRES_SHARED(Locks::mutator_lock_);
Alex Lightd7661582017-05-01 13:48:16 -0700609 void FieldWriteEventImpl(Thread* thread,
610 ObjPtr<mirror::Object> this_object,
611 ArtMethod* method,
612 uint32_t dex_pc,
613 ArtField* field,
614 const JValue& field_value) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700615 REQUIRES_SHARED(Locks::mutator_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800616
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700617 // Read barrier-aware utility functions for accessing deoptimized_methods_
Mathieu Chartiere401d142015-04-22 13:56:20 -0700618 bool AddDeoptimizedMethod(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800619 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700620 bool IsDeoptimizedMethod(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800621 REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700622 bool RemoveDeoptimizedMethod(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800623 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700624 ArtMethod* BeginDeoptimizedMethod()
Andreas Gampe7e56a072018-11-29 10:40:06 -0800625 REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
Mythri Alleab474882022-01-17 16:43:04 +0000626 bool IsDeoptimizedMethodsEmptyLocked() const
Andreas Gampe7e56a072018-11-29 10:40:06 -0800627 REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
Nicolas Geoffray854af032021-12-21 08:32:42 +0000628 void UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800629 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Mingyao Yang3fd448a2016-05-10 14:30:41 -0700630
  // Returns the lock guarding deoptimized_methods_. The mutex is held behind a
  // unique_ptr (see deoptimized_methods_lock_ below), so this accessor is what the
  // thread-safety annotations (REQUIRES(GetDeoptimizedMethodsLock()) etc.) refer to.
  ReaderWriterMutex* GetDeoptimizedMethodsLock() const {
    return deoptimized_methods_lock_.get();
  }
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700634
  // A counter that's incremented every time DeoptimizeAllThreadFrames is called. We check each
  // InstrumentationStackFrame's creation id against this number and if they differ we deopt even
  // if we could otherwise continue running.
638 uint64_t current_force_deopt_id_ GUARDED_BY(Locks::mutator_lock_);
639
Brian Carlstromea46f952013-07-30 01:26:50 -0700640 // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
Ian Rogers62d6c772013-02-27 08:32:07 -0800641 bool instrumentation_stubs_installed_;
642
Mythri Alle9575c122021-11-12 12:04:41 +0000643 // The required level of instrumentation. This could be one of the following values:
644 // kInstrumentNothing: no instrumentation support is needed
645 // kInstrumentWithInstrumentationStubs: needs support to call method entry/exit stubs.
646 // kInstrumentWithInterpreter: only execute with interpreter
647 Instrumentation::InstrumentationLevel instrumentation_level_;
Ian Rogers62d6c772013-02-27 08:32:07 -0800648
649 // Did the runtime request we only run in the interpreter? ie -Xint mode.
650 bool forced_interpret_only_;
651
652 // Do we have any listeners for method entry events? Short-cut to avoid taking the
653 // instrumentation_lock_.
Sebastien Hertzed2be172014-08-19 15:33:43 +0200654 bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800655
656 // Do we have any listeners for method exit events? Short-cut to avoid taking the
657 // instrumentation_lock_.
Sebastien Hertzed2be172014-08-19 15:33:43 +0200658 bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800659
660 // Do we have any listeners for method unwind events? Short-cut to avoid taking the
661 // instrumentation_lock_.
Sebastien Hertzed2be172014-08-19 15:33:43 +0200662 bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800663
664 // Do we have any listeners for dex move events? Short-cut to avoid taking the
665 // instrumentation_lock_.
Sebastien Hertzed2be172014-08-19 15:33:43 +0200666 bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800667
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200668 // Do we have any listeners for field read events? Short-cut to avoid taking the
669 // instrumentation_lock_.
Sebastien Hertzed2be172014-08-19 15:33:43 +0200670 bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200671
672 // Do we have any listeners for field write events? Short-cut to avoid taking the
673 // instrumentation_lock_.
Sebastien Hertzed2be172014-08-19 15:33:43 +0200674 bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200675
Alex Light6e1607e2017-08-23 10:06:18 -0700676 // Do we have any exception thrown listeners? Short-cut to avoid taking the instrumentation_lock_.
677 bool have_exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800678
Alex Lighte814f9d2017-07-31 16:14:39 -0700679 // Do we have any frame pop listeners? Short-cut to avoid taking the instrumentation_lock_.
680 bool have_watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);
681
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000682 // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
683 bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800684
Alex Light9fb1ab12017-09-05 09:32:49 -0700685 // Do we have any exception handled listeners? Short-cut to avoid taking the
686 // instrumentation_lock_.
687 bool have_exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);
688
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200689 // Contains the instrumentation level required by each client of the instrumentation identified
690 // by a string key.
Vladimir Marko4f990712021-07-14 12:45:13 +0100691 using InstrumentationLevelTable = SafeMap<const char*, InstrumentationLevel>;
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200692 InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);
693
Ian Rogers62d6c772013-02-27 08:32:07 -0800694 // The event listeners, written to with the mutator_lock_ exclusively held.
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000695 // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
696 // added or removed while iterating. The modifying thread holds exclusive lock,
697 // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
698 // do keep iterators that need to remain valid. This is the reason these listeners are std::list
699 // and not for example std::vector: the existing storage for a std::list does not move.
700 // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
701 // listeners can also be deleted concurrently.
702 // As a result, these lists are never trimmed. That's acceptable given the low number of
703 // listeners we have.
Ian Rogers62d6c772013-02-27 08:32:07 -0800704 std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
705 std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
706 std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000707 std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
Nicolas Geoffray514a6162015-11-03 11:44:24 +0000708 std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
709 std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
710 std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
Alex Light6e1607e2017-08-23 10:06:18 -0700711 std::list<InstrumentationListener*> exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);
Alex Lighte814f9d2017-07-31 16:14:39 -0700712 std::list<InstrumentationListener*> watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);
Alex Light9fb1ab12017-09-05 09:32:49 -0700713 std::list<InstrumentationListener*> exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800714
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100715 // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
716 // only.
Andreas Gampe7e56a072018-11-29 10:40:06 -0800717 mutable std::unique_ptr<ReaderWriterMutex> deoptimized_methods_lock_ BOTTOM_MUTEX_ACQUIRED_AFTER;
718 std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100719
Ian Rogersfa824272013-11-05 16:12:57 -0800720 // Current interpreter handler table. This is updated each time the thread state flags are
721 // modified.
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200722
Ian Rogersfa824272013-11-05 16:12:57 -0800723 // Greater than 0 if quick alloc entry points instrumented.
Mathieu Chartiereebc3af2016-02-29 18:13:38 -0800724 size_t quick_alloc_entry_points_instrumentation_counter_;
Mathieu Chartier50e93312016-03-16 11:25:29 -0700725
726 // alloc_entrypoints_instrumented_ is only updated with all the threads suspended, this is done
727 // to prevent races with the GC where the GC relies on thread suspension only see
728 // alloc_entrypoints_instrumented_ change during suspend points.
729 bool alloc_entrypoints_instrumented_;
730
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200731 friend class InstrumentationTest; // For GetCurrentInstrumentationLevel and ConfigureStubs.
Alex Light2c8206f2018-06-08 14:51:09 -0700732 friend class InstrumentationStackPopper; // For popping instrumentation frames.
Mythri Alle5097f832021-11-02 14:52:30 +0000733 friend void InstrumentationInstallStack(Thread*, void*, bool);
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200734
jeffhao725a9572012-11-13 18:20:12 -0800735 DISALLOW_COPY_AND_ASSIGN(Instrumentation);
736};
Vladimir Marko9974e3c2020-06-10 16:27:06 +0100737std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationEvent rhs);
738std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationLevel rhs);
jeffhao725a9572012-11-13 18:20:12 -0800739
// An element in the instrumentation side stack maintained in art::Thread. A frame is
// pushed for an instrumented method invocation and popped by
// Instrumentation::PopInstrumentationStackFrame when the method exits.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object,
                            ArtMethod* method,
                            uintptr_t return_pc,
                            size_t frame_id,
                            bool interpreter_entry,
                            uint64_t force_deopt_id)
      : this_object_(this_object),
        method_(method),
        return_pc_(return_pc),
        frame_id_(frame_id),
        interpreter_entry_(interpreter_entry),
        force_deopt_id_(force_deopt_id) {
  }

  // Human-readable description of this frame, for debugging / logging.
  std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Receiver of the invocation (presumably null for static methods — TODO confirm).
  mirror::Object* this_object_;
  // The method executing in this frame.
  ArtMethod* method_;
  // The original return address, restored as the intended link register when the
  // frame is popped (see Instrumentation::PopInstrumentationStackFrame).
  uintptr_t return_pc_;
  // Identifier of the corresponding managed frame (see Instrumentation::ComputeFrameId).
  size_t frame_id_;
  // Whether this frame was entered through the interpreter entry rather than compiled code.
  bool interpreter_entry_;
  // Snapshot of Instrumentation::current_force_deopt_id_ at creation time; if it differs
  // from the current value when returning, the frame is deoptimized even if it could
  // otherwise continue running (see Instrumentation::DeoptimizeAllThreadFrames).
  uint64_t force_deopt_id_;
};
765
766} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -0800767} // namespace art
768
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700769#endif // ART_RUNTIME_INSTRUMENTATION_H_