blob: c2de71829eefd398b2126f44ffea8ccccfa10a8c [file] [log] [blame]
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Mathieu Chartiere401d142015-04-22 13:56:20 -070017#ifndef ART_RUNTIME_ART_METHOD_H_
18#define ART_RUNTIME_ART_METHOD_H_
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080019
Andreas Gampe479b1de2016-07-19 18:27:17 -070020#include <cstddef>
Mathieu Chartier7f8678e2019-08-30 16:22:28 -070021#include <limits>
Andreas Gampe479b1de2016-07-19 18:27:17 -070022
Andreas Gampe57943812017-12-06 21:39:13 -080023#include <android-base/logging.h>
Andreas Gampe7458a7a2019-01-02 10:32:11 -080024#include <jni.h>
Andreas Gampe57943812017-12-06 21:39:13 -080025
Mathieu Chartier210531f2018-01-12 10:15:51 -080026#include "base/array_ref.h"
Nicolas Geoffray6bc43742015-10-12 18:11:10 +010027#include "base/bit_utils.h"
Vladimir Marko05792b92015-08-03 11:56:49 +010028#include "base/casts.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070029#include "base/enums.h"
Ulya Trafimovich8082cf92022-04-07 12:47:13 +010030#include "base/logging.h"
Andreas Gampe57943812017-12-06 21:39:13 -080031#include "base/macros.h"
Andreas Gampedcc528d2017-12-07 13:37:10 -080032#include "base/runtime_debug.h"
Andreas Gampe3f1dcd32018-12-28 09:39:56 -080033#include "dex/dex_file_structs.h"
David Sehr8c0961f2018-01-23 16:11:38 -080034#include "dex/modifiers.h"
David Sehr67bf42e2018-02-26 16:43:04 -080035#include "dex/primitive.h"
Nicolas Geoffray61673dc2021-11-06 13:58:31 +000036#include "interpreter/mterp/nterp.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070037#include "gc_root.h"
Mathieu Chartier28357fa2016-10-18 16:27:40 -070038#include "obj_ptr.h"
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070039#include "offsets.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070040#include "read_barrier_option.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080041
42namespace art {
43
Andreas Gampefe613a72019-05-01 15:54:20 -070044class CodeItemDataAccessor;
45class CodeItemDebugInfoAccessor;
46class CodeItemInstructionAccessor;
Andreas Gampe3f1dcd32018-12-28 09:39:56 -080047class DexFile;
David Sehr9323e6e2016-09-13 08:58:35 -070048template<class T> class Handle;
Andreas Gampe75a7db62016-09-26 12:04:26 -070049class ImtConflictTable;
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070050enum InvokeType : uint32_t;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080051union JValue;
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010052class OatQuickMethodHeader;
Nicolas Geoffray5550ca82015-08-21 18:38:30 +010053class ProfilingInfo;
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -070054class ScopedObjectAccessAlreadyRunnable;
Jeff Hao16743632013-05-08 10:59:04 -070055class ShadowFrame;
Andreas Gampeb79674c2019-05-14 16:16:46 -070056class Signature;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080057
58namespace mirror {
Mathieu Chartiere401d142015-04-22 13:56:20 -070059class Array;
60class Class;
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070061class ClassLoader;
62class DexCache;
Mathieu Chartiere42888f2016-04-14 10:49:19 -070063class IfTable;
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070064class Object;
65template <typename MirrorType> class ObjectArray;
Mathieu Chartiere401d142015-04-22 13:56:20 -070066class PointerArray;
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070067class String;
Mathieu Chartiere401d142015-04-22 13:56:20 -070068} // namespace mirror
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080069
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010070class ArtMethod final {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  // Default constructor: all integral state zeroed; the declaring class is unset.
  ArtMethod() : access_flags_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  // Construct as a copy of `src`; the field layout copied depends on the image pointer size.
  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  // Returns the ArtMethod* underlying a java.lang.reflect.Method/Constructor jobject.
  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Declaring class accessor; the read-barrier option controls how the GC reference is read.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  // As above, but without the checks performed by GetDeclaringClass().
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Raw address of the declaring-class reference, bypassing the read barrier.
  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Compare-and-swap the declaring class from `expected_class` to `desired_class`;
  // returns whether the swap took place.
  bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Byte offset of the declaring_class_ field, for use by generated/assembly code.
  static constexpr MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }
110
  // Racy read of the access flags (relaxed atomic load).
  uint32_t GetAccessFlags() const {
    return access_flags_.load(std::memory_order_relaxed);
  }

  // This version should only be called when it's certain there is no
  // concurrency so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) REQUIRES_SHARED(Locks::mutator_lock_) {
    // The following check ensures that we do not set `Intrinsics::kNone` (see b/228049006).
    DCHECK_IMPLIES((new_access_flags & kAccIntrinsic) != 0,
                   (new_access_flags & kAccIntrinsicBits) != 0);
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  // Byte offset of the access_flags_ field, for use by generated/assembly code.
  static constexpr MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }
128
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800129 // Approximate what kind of method call would be used for this method.
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700130 InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800131
132 // Returns true if the method is declared public.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000133 bool IsPublic() const {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800134 return (GetAccessFlags() & kAccPublic) != 0;
135 }
136
137 // Returns true if the method is declared private.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000138 bool IsPrivate() const {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800139 return (GetAccessFlags() & kAccPrivate) != 0;
140 }
141
142 // Returns true if the method is declared static.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000143 bool IsStatic() const {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800144 return (GetAccessFlags() & kAccStatic) != 0;
145 }
146
Orion Hodson6c4921b2016-09-21 15:41:06 +0100147 // Returns true if the method is a constructor according to access flags.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000148 bool IsConstructor() const {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800149 return (GetAccessFlags() & kAccConstructor) != 0;
150 }
151
Orion Hodson6c4921b2016-09-21 15:41:06 +0100152 // Returns true if the method is a class initializer according to access flags.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000153 bool IsClassInitializer() const {
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700154 return IsConstructor() && IsStatic();
155 }
156
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800157 // Returns true if the method is static, private, or a constructor.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000158 bool IsDirect() const {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800159 return IsDirect(GetAccessFlags());
160 }
161
162 static bool IsDirect(uint32_t access_flags) {
Andreas Gampecbc96b82015-09-30 20:05:24 +0000163 constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
164 return (access_flags & direct) != 0;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800165 }
166
  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    constexpr uint32_t synchonized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchonized) != 0;
  }

  // Returns true if the method is declared final.
  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  // Returns true if the method is an intrinsic (kAccIntrinsic set).
  bool IsIntrinsic() const {
    return (GetAccessFlags() & kAccIntrinsic) != 0;
  }

  // Record the intrinsic ordinal in the intrinsic bits of the access flags.
  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the intrinsic ordinal stored in the intrinsic bits. Requires IsIntrinsic().
  uint32_t GetIntrinsic() const {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not continuous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift;
  }

  // Clear the intrinsic marker and its ordinal bits.
  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);
Nicolas Geoffray762869d2016-07-15 15:28:35 +0100194
  // Returns true if this method was copied into its declaring class (not declared there).
  bool IsCopied() const {
    // We do not have intrinsics for any default methods and therefore intrinsics are never copied.
    // So we are using a flag from the intrinsic flags range and need to check `kAccIntrinsic` too.
    static_assert((kAccCopied & kAccIntrinsicBits) != 0,
                  "kAccCopied deliberately overlaps intrinsic bits");
    const bool copied = (GetAccessFlags() & (kAccIntrinsic | kAccCopied)) == kAccCopied;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() const {
    // Miranda methods are marked as copied and abstract but not default.
    // We need to check the kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract;
    return (GetAccessFlags() & kMask) == kValue;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError
  // if one attempts to do so.
  bool IsDefaultConflicting() const {
    // Default conflict methods are marked as copied, abstract and default.
    // We need to check the kAccIntrinsic too, see `IsCopied()`.
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCopied | kAccAbstract | kAccDefault;
    static constexpr uint32_t kValue = kAccCopied | kAccAbstract | kAccDefault;
    return (GetAccessFlags() & kMask) == kValue;
  }
225
  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() const {
    // Default conflicting methods are marked with `kAccAbstract` (as well as `kAccCopied`
    // and `kAccDefault`) but they are not considered abstract, see `IsAbstract()`.
    DCHECK_EQ((GetAccessFlags() & kAccAbstract) == 0, !IsDefaultConflicting() && !IsAbstract());
    return (GetAccessFlags() & kAccAbstract) == 0;
  }

  // Returns true if the method is marked as precompiled (and not an intrinsic).
  bool IsPreCompiled() const {
    // kAccCompileDontBother and kAccPreCompiled overlap with kAccIntrinsicBits.
    static_assert((kAccCompileDontBother & kAccIntrinsicBits) != 0);
    static_assert((kAccPreCompiled & kAccIntrinsicBits) != 0);
    static constexpr uint32_t kMask = kAccIntrinsic | kAccCompileDontBother | kAccPreCompiled;
    static constexpr uint32_t kValue = kAccCompileDontBother | kAccPreCompiled;
    return (GetAccessFlags() & kMask) == kValue;
  }
243
  // Mark the method as precompiled and excluded from JIT compilation. No-op for intrinsics.
  void SetPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsInvokable());
    DCHECK(IsCompilable());
    // kAccPreCompiled and kAccCompileDontBother overlap with kAccIntrinsicBits.
    // We don't mark the intrinsics as precompiled, which means in JIT zygote
    // mode, compiled code for intrinsics will not be shared, and apps will
    // compile intrinsics themselves if needed.
    if (IsIntrinsic()) {
      return;
    }
    AddAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }

  // Returns true if the method is marked as memory-shared.
  bool IsMemorySharedMethod() {
    return (GetAccessFlags() & kAccMemorySharedMethod) != 0;
  }

  void SetMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    // Disable until we make sure critical code is AOTed.
    static constexpr bool kEnabledMemorySharedMethod = false;
    if (kEnabledMemorySharedMethod && !IsIntrinsic() && !IsAbstract()) {
      AddAccessFlags(kAccMemorySharedMethod);
      SetHotCounter();
    }
  }

  // Clear the memory-shared marker; no-op for intrinsics and abstract methods.
  void ClearMemorySharedMethod() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic() || IsAbstract()) {
      return;
    }
    if (IsMemorySharedMethod()) {
      ClearAccessFlags(kAccMemorySharedMethod);
    }
  }

  // Undo SetPreCompiled().
  void ClearPreCompiled() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccPreCompiled | kAccCompileDontBother);
  }
282
  // Returns true if the compiler may compile this method.
  bool IsCompilable() const {
    if (IsIntrinsic()) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    if (IsPreCompiled()) {
      return true;
    }
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  void ClearDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    ClearAccessFlags(kAccCompileDontBother);
  }

  // Exclude this method from compilation.
  void SetDontCompile() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsMiranda());
    AddAccessFlags(kAccCompileDontBother);
  }
303
  // This is set by the class linker.
  bool IsDefault() const {
    static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccDefault conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  // Returns true if the method is marked obsolete (kAccObsoleteMethod).
  bool IsObsolete() const {
    return (GetAccessFlags() & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccObsoleteMethod);
  }

  // Returns true if the method is declared native.
  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() const {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() const {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() const {
    // Default conflicting methods have `kAccAbstract` set but they are not actually abstract.
    return (GetAccessFlags() & kAccAbstract) != 0 && !IsDefaultConflicting();
  }

  // Returns true if the method is compiler-generated (kAccSynthetic).
  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  // Returns true if the method is declared with a variable-length argument list.
  bool IsVarargs() const {
    return (GetAccessFlags() & kAccVarargs) != 0;
  }
353
  // Returns true if this method belongs to a java.lang.reflect.Proxy-generated class.
  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true for signature-polymorphic methods (e.g. MethodHandle.invoke-style).
  bool IsSignaturePolymorphic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() const {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    AddAccessFlags(kAccSkipAccessChecks);
  }
  void ClearSkipAccessChecks() REQUIRES_SHARED(Locks::mutator_lock_) {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    ClearAccessFlags(kAccSkipAccessChecks);
  }
Sebastien Hertz233ea8e2013-06-06 11:57:09 +0200374
  bool PreviouslyWarm() const {
    // kAccPreviouslyWarm overlaps with kAccIntrinsicBits. Return true for intrinsics.
    constexpr uint32_t mask = kAccPreviouslyWarm | kAccIntrinsic;
    return (GetAccessFlags() & mask) != 0u;
  }

  void SetPreviouslyWarm() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags(kAccPreviouslyWarm);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() const {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  void ClearMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClearAccessFlags(kAccMustCountLocks);
  }

  // Force lock counting; also drops the skip-access-checks fast path.
  void SetMustCountLocks() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccMustCountLocks);
    ClearAccessFlags(kAccSkipAccessChecks);
  }

  // Only meaningful for non-native methods (the flag value is reused for native ones).
  bool HasNterpEntryPointFastPathFlag() const {
    constexpr uint32_t mask = kAccNative | kAccNterpEntryPointFastPathFlag;
    return (GetAccessFlags() & mask) == kAccNterpEntryPointFastPathFlag;
  }

  void SetNterpEntryPointFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    AddAccessFlags(kAccNterpEntryPointFastPathFlag);
  }

  void SetNterpInvokeFastPathFlag() REQUIRES_SHARED(Locks::mutator_lock_) {
    AddAccessFlags(kAccNterpInvokeFastPathFlag);
  }
420
  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if calling this method with the given invoke type is legal.
  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable();
  void ThrowInvocationTimeError() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  // The vtable index is the method index (widened to size_t).
  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  // Field offsets below are for use by generated/assembly code.
  static constexpr MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static constexpr MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  static constexpr MemberOffset ImtIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, imt_index_));
  }

  // Number of 32bit registers that would be required to hold all the arguments
  static size_t NumArgRegisters(const char* shorty);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800458
  ALWAYS_INLINE uint32_t GetDexMethodIndex() const {
    return dex_method_index_;
  }

  void SetDexMethodIndex(uint32_t new_idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature of the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Invoke this method with the given packed arguments, storing the result in `result`.
  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800492
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000493 const void* GetEntryPointFromQuickCompiledCode() const {
Andreas Gampe542451c2016-07-26 09:02:02 -0700494 return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
Mathieu Chartier2d721012014-11-10 11:08:06 -0800495 }
  // Reads the quick-compiled-code entrypoint for an image with the given
  // pointer size (which may differ from the native pointer size).
  ALWAYS_INLINE
  const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) const {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }
501
  // Sets the quick-compiled-code entrypoint using the runtime's own pointer size.
  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  // Writes the quick-compiled-code entrypoint for an image with the given pointer size.
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }
514
  // Offset of the `data_` slot within an ArtMethod laid out for `pointer_size`.
  // The `/ sizeof(void*) * pointer_size` scaling converts the native field
  // offset into the corresponding offset for a (possibly different) image
  // pointer size.
  static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }
519
  // The JNI entrypoint shares storage with the `data_` slot (see PtrSizedFields::data_),
  // so its offset is simply DataOffset().
  static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }
523
  // Offset of the quick-compiled-code entrypoint within an ArtMethod laid out
  // for `pointer_size`, scaled the same way as DataOffset().
  static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }
529
  // For runtime (conflict) methods the `data_` slot holds the ImtConflictTable.
  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) const {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }
534
  // Stores the conflict table in the `data_` slot; only valid on runtime methods.
  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }
540
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +0300541 template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
Mingyao Yang063fc772016-08-02 11:02:54 -0700542 ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);
543
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000544 ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)
545 REQUIRES_SHARED(Locks::mutator_lock_) {
Mingyao Yang063fc772016-08-02 11:02:54 -0700546 DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
547 if (single_impl) {
548 AddAccessFlags(kAccSingleImplementation);
549 } else {
550 ClearAccessFlags(kAccSingleImplementation);
551 }
552 }
553
  // Returns whether the kAccSingleImplementation access-flag bit is set.
  ALWAYS_INLINE bool HasSingleImplementationFlag() const {
    return (GetAccessFlags() & kAccSingleImplementation) != 0;
  }
557
Alex Light97e78032017-06-27 17:51:55 -0700558 // Takes a method and returns a 'canonical' one if the method is default (and therefore
559 // potentially copied from some other class). For example, this ensures that the debugger does not
560 // get confused as to which method we are in.
561 ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
562 REQUIRES_SHARED(Locks::mutator_lock_);
563
Vladimir Markoc945e0d2018-07-18 17:26:45 +0100564 ArtMethod* GetSingleImplementation(PointerSize pointer_size);
Mingyao Yang063fc772016-08-02 11:02:54 -0700565
  // Records the single implementation of this abstract method in the `data_`
  // slot. `method` may be null (clearing any previously recorded value);
  // otherwise it must be invokable.
  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNative());
    // Non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract());
    DCHECK(method == nullptr || method->IsInvokable());
    SetDataPtrSize(method, pointer_size);
  }
574
  // Returns the JNI entrypoint (stored in the `data_` slot) using the runtime's
  // own pointer size. Only valid on native methods.
  void* GetEntryPointFromJni() const {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100579
  // Reads the JNI entrypoint from the `data_` slot for the given pointer size.
  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) const {
    return GetDataPtrSize(pointer_size);
  }
583
  // Sets the JNI entrypoint using the runtime's own pointer size.
  void SetEntryPointFromJni(const void* entrypoint)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // The resolution method also has a JNI entrypoint for direct calls from
    // compiled code to the JNI dlsym lookup stub for @CriticalNative.
    DCHECK(IsNative() || IsRuntimeMethod());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100591
  // Writes the JNI entrypoint into the `data_` slot for the given pointer size.
  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetDataPtrSize(entrypoint, pointer_size);
  }
596
  // Raw read of the method-type-dependent `data_` slot (see PtrSizedFields::data_).
  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) const {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }
601
  // Raw write of the method-type-dependent `data_` slot (see PtrSizedFields::data_).
  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800607
  // Is this a CalleeSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
  // conventions for a method of managed code. Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() const {
    // Runtime methods have no dex method index; they carry a sentinel value instead.
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800613
Nicolas Geoffray47171752020-08-31 15:03:20 +0100614 bool HasCodeItem() REQUIRES_SHARED(Locks::mutator_lock_) {
615 return !IsRuntimeMethod() && !IsNative() && !IsProxyMethod() && !IsAbstract();
616 }
617
zhaoxuyang7156ea22022-01-10 13:58:11 +0800618 // We need to explicitly indicate whether the code item is obtained from the compact dex file,
619 // because in JVMTI, we obtain the code item from the standard dex file to update the method.
620 void SetCodeItem(const dex::CodeItem* code_item, bool is_compact_dex_code_item)
621 REQUIRES_SHARED(Locks::mutator_lock_);
Nicolas Geoffray47171752020-08-31 15:03:20 +0100622
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800623 // Is this a hand crafted method used for something like describing callee saves?
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700624 bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800625
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700626 bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800627
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700628 bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -0700629
Ian Rogersc449aa82013-07-29 14:35:46 -0700630 // Find the catch block for the given exception type and dex_pc. When a catch block is found,
631 // indicates whether the found catch block is responsible for clearing the exception or whether
632 // a move-exception instruction is present.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700633 uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
634 bool* has_no_move_exception)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700635 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800636
Mathieu Chartierda7c6502015-07-23 16:01:26 -0700637 // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
Hiroshi Yamauchi7a62e672016-06-10 17:22:48 -0700638 template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename RootVisitorType>
Andreas Gampe542451c2016-07-26 09:02:02 -0700639 void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;
Mathieu Chartierc528dba2013-11-26 12:00:11 -0800640
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700641 const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700642
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700643 const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700644
Andreas Gampec6ea7d02017-02-01 16:46:28 -0800645 ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700646
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700647 const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700648
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700649 const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700650
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700651 ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700652
Eric Holkabdb4592019-05-16 08:33:12 -0700653 ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);
654
Vladimir Marko18090d12018-06-01 16:53:12 +0100655 ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers6b14d552014-10-28 21:50:58 -0700656
Vladimir Markod4e07aa2021-10-04 12:56:13 +0100657 bool NameEquals(ObjPtr<mirror::String> name) REQUIRES_SHARED(Locks::mutator_lock_);
658
Andreas Gampe3f1dcd32018-12-28 09:39:56 -0800659 const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700660
Vladimir Marko942fd312017-01-16 20:52:19 +0000661 bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700662
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700663 int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700664
Andreas Gampe3f1dcd32018-12-28 09:39:56 -0800665 const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700666
Andreas Gampe3f1dcd32018-12-28 09:39:56 -0800667 const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700668
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700669 const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700670
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700671 uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700672
Andreas Gampe3f1dcd32018-12-28 09:39:56 -0800673 const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700674
Orion Hodson58143d22018-02-20 08:44:20 +0000675 ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);
676
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700677 const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700678
Alex Lightd7661582017-05-01 13:48:16 -0700679 ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);
680
Andreas Gampea5b09a62016-11-17 15:21:22 -0800681 const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700682 REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700683
Vladimir Markob45528c2017-07-27 14:14:28 +0100684 // Lookup return type.
685 ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension due to GetClassFromTypeIdx
  // calling ResolveType; this has caused a large number of bugs at call sites.
688 ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogersded66a02014-10-28 18:12:55 -0700689
Vladimir Markoc524e9e2019-03-26 10:54:50 +0000690 ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700691
Mathieu Chartier137cdfa2017-01-26 14:03:11 -0800692 template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
Vladimir Markoc524e9e2019-03-26 10:54:50 +0000693 ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
694 ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700695
Vladimir Markod1ee20f2017-08-17 09:21:16 +0000696 ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
697 REQUIRES_SHARED(Locks::mutator_lock_);
Andreas Gampe542451c2016-07-26 09:02:02 -0700698 ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700699 REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700700
Alex Light4ba388a2017-01-27 10:26:49 -0800701 ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);
702
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700703 // May cause thread suspension due to class resolution.
704 bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700705 REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700706
  // Size of an instance of this native class.
  static constexpr size_t Size(PointerSize pointer_size) {
    // Pointer-size-aligned prefix plus the pointer-sized fields scaled to the
    // target pointer size (see DataOffset() for the scaling rationale).
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }
712
  // Alignment of an instance of this native class.
  static constexpr size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }
719
Andreas Gampe542451c2016-07-26 09:02:02 -0700720 void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700721 REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700722
Nicolas Geoffray58f916c2021-11-15 14:02:07 +0000723 ALWAYS_INLINE void ResetCounter(uint16_t new_value);
Nicolas Geoffray61673dc2021-11-06 13:58:31 +0000724 ALWAYS_INLINE void UpdateCounter(int32_t new_samples);
725 ALWAYS_INLINE void SetHotCounter();
726 ALWAYS_INLINE bool CounterIsHot();
Nicolas Geoffray58f916c2021-11-15 14:02:07 +0000727 ALWAYS_INLINE bool CounterHasReached(uint16_t samples, uint16_t threshold);
Vladimir Markodd446b12021-05-20 14:35:51 +0100728 ALWAYS_INLINE uint16_t GetCounter();
Nicolas Geoffray58f916c2021-11-15 14:02:07 +0000729 ALWAYS_INLINE bool CounterHasChanged(uint16_t threshold);
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +0100730
  // Largest value representable in the hotness counter field.
  ALWAYS_INLINE static constexpr uint16_t MaxCounter() {
    return std::numeric_limits<decltype(hotness_count_)>::max();
  }
734
David Srbeckye36e7f22018-11-14 14:21:23 +0000735 ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);
Bill Buzbee1d011d92016-04-04 16:59:29 +0000736
David Srbeckye36e7f22018-11-14 14:21:23 +0000737 void CalculateAndSetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);
Bill Buzbee1d011d92016-04-04 16:59:29 +0000738
  // Offset of the `hotness_count_` field within ArtMethod.
  static constexpr MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }
742
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +0100743 // Returns the method header for the compiled code containing 'pc'. Note that runtime
744 // methods will return null for this method, as they are not oat based.
745 const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700746 REQUIRES_SHARED(Locks::mutator_lock_);
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +0100747
Vladimir Marko97d7e1c2016-10-04 14:44:28 +0100748 // Get compiled code for the method, return null if no code exists.
749 const void* GetOatMethodQuickCode(PointerSize pointer_size)
750 REQUIRES_SHARED(Locks::mutator_lock_);
751
Nicolas Geoffraya5891e82015-11-06 14:18:27 +0000752 // Returns whether the method has any compiled code, JIT or AOT.
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700753 bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);
Nicolas Geoffraya5891e82015-11-06 14:18:27 +0000754
David Sehr709b0702016-10-13 09:12:37 -0700755 // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
756 // "a.b.C.m(II)V" (depending on the value of 'with_signature').
757 static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
758 REQUIRES_SHARED(Locks::mutator_lock_);
759 std::string PrettyMethod(bool with_signature = true)
760 REQUIRES_SHARED(Locks::mutator_lock_);
761 // Returns the JNI native function name for the non-overloaded method 'm'.
762 std::string JniShortName()
763 REQUIRES_SHARED(Locks::mutator_lock_);
764 // Returns the JNI native function name for the overloaded method 'm'.
765 std::string JniLongName()
766 REQUIRES_SHARED(Locks::mutator_lock_);
767
Mathieu Chartierfbc31082016-01-24 11:59:56 -0800768 // Update entry points by passing them through the visitor.
Vladimir Markoc945e0d2018-07-18 17:26:45 +0100769 template <typename Visitor>
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +0000770 ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size)
771 REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartierfbc31082016-01-24 11:59:56 -0800772
  // Visit the individual members of an ArtMethod. Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  // The visitor is invoked once per member with (object, member pointer, member name).
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }
789
Mathieu Chartier808c7a52017-12-15 11:19:33 -0800790 // Returns the dex instructions of the code item for the art method. Returns an empty array for
791 // the null code item case.
792 ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
Mathieu Chartier69147f12017-11-06 20:02:24 -0800793 REQUIRES_SHARED(Locks::mutator_lock_);
794
David Sehr0225f8e2018-01-31 08:52:24 +0000795 // Returns the dex code item data section of the DexFile for the art method.
796 ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
797 REQUIRES_SHARED(Locks::mutator_lock_);
798
799 // Returns the dex code item debug info section of the DexFile for the art method.
800 ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
801 REQUIRES_SHARED(Locks::mutator_lock_);
802
  // Returns a mutable reference to the GC root holding the declaring class.
  GcRoot<mirror::Class>& DeclaringClassRoot() {
    return declaring_class_;
  }
806
Mathieu Chartier2d721012014-11-10 11:08:06 -0800807 protected:
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800808 // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
Ian Rogersef7d42f2014-01-06 12:55:46 -0800809 // The class we are a part of.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700810 GcRoot<mirror::Class> declaring_class_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800811
Ian Rogersef7d42f2014-01-06 12:55:46 -0800812 // Access flags; low 16 bits are defined by spec.
Mingyao Yang063fc772016-08-02 11:02:54 -0700813 // Getting and setting this flag needs to be atomic when concurrency is
814 // possible, e.g. after this method's class is linked. Such as when setting
815 // verifier flags and single-implementation flag.
816 std::atomic<std::uint32_t> access_flags_;
Ian Rogersef7d42f2014-01-06 12:55:46 -0800817
818 /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */
819
Ian Rogersef7d42f2014-01-06 12:55:46 -0800820 // Index into method_ids of the dex file associated with this method.
821 uint32_t dex_method_index_;
822
823 /* End of dex file fields. */
824
825 // Entry within a dispatch table for this method. For static/direct methods the index is into
826 // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
827 // ifTable.
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100828 uint16_t method_index_;
829
David Srbeckye36e7f22018-11-14 14:21:23 +0000830 union {
831 // Non-abstract methods: The hotness we measure for this method. Not atomic,
832 // as we allow missing increments: if the method is hot, we will see it eventually.
833 uint16_t hotness_count_;
Nicolas Geoffrayd5a86952021-01-19 10:35:54 +0000834 // Abstract methods: IMT index.
David Srbeckye36e7f22018-11-14 14:21:23 +0000835 uint16_t imt_index_;
836 };
Ian Rogersef7d42f2014-01-06 12:55:46 -0800837
Mathieu Chartiereace4582014-11-24 18:29:54 -0800838 // Fake padding field gets inserted here.
Mathieu Chartier2d721012014-11-10 11:08:06 -0800839
  // Must be the last fields in the method.
  // Their layout depends on the image pointer size (see DataOffset() / Size()),
  // so they are accessed via Get/SetNativePointer rather than directly.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    // - native method: pointer to the JNI function registered to this method
    //   or a function to resolve the JNI function,
    // - resolution method: pointer to a function to resolve the method and
    //   the JNI function for @CriticalNative.
    // - conflict method: ImtConflictTable,
    // - abstract/interface method: the single-implementation if any,
    // - proxy method: the original interface method or constructor,
    // - other methods: during AOT the code item offset, at runtime a pointer
    //   to the code item.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;
858
Mathieu Chartier02e25112013-08-14 16:14:24 -0700859 private:
Alex Lightf2f1c9d2017-03-15 15:35:46 +0000860 uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);
861
  // Start offset of the trailing pointer-sized fields for the given pointer size.
  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }
867
Andreas Gampe75f08852016-07-19 08:06:07 -0700868 // Compare given pointer size to the image pointer size.
Andreas Gampe542451c2016-07-26 09:02:02 -0700869 static bool IsImagePointerSize(PointerSize pointer_size);
Andreas Gampe75f08852016-07-19 08:06:07 -0700870
Vladimir Markob45528c2017-07-27 14:14:28 +0100871 dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);
872
  // Reads a pointer-valued field at `offset` that is stored with the given
  // pointer size (32-bit fields are zero-extended to the native pointer width).
  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      // dchecked_integral_cast verifies the 64-bit value fits in uintptr_t.
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }
884
  // Writes a pointer-valued field at `offset` using the given storage pointer
  // size. For 32-bit storage the value is checked to fit in 32 bits.
  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }
897
Orion Hodsoncfcc9cf2017-09-29 15:07:27 +0100898 static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
899 return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
Ulya Trafimovich8082cf92022-04-07 12:47:13 +0100900 ((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0) &&
901 ((modifier & kAccIntrinsicBits) != 0)); // b/228049006: ensure intrinsic is not `kNone`
Orion Hodsoncfcc9cf2017-09-29 15:07:27 +0100902 }
903
  // Returns whether `modifier` touches any bit used by the intrinsic ordinal encoding.
  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }
907
  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    // On an intrinsic, only updates that preserve the intrinsic encoding are allowed.
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // None of the readers rely on ordering.
    access_flags_.fetch_or(flag, std::memory_order_relaxed);
  }
914
  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) REQUIRES_SHARED(Locks::mutator_lock_) {
    // On an intrinsic, only updates that preserve the intrinsic encoding are allowed.
    DCHECK_IMPLIES(IsIntrinsic(), !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    // Relaxed order suffices; none of the readers rely on ordering.
    access_flags_.fetch_and(~flag, std::memory_order_relaxed);
  }
920
Eric Holkabdb4592019-05-16 08:33:12 -0700921 // Used by GetName and GetNameView to share common code.
922 const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);
923
Mathieu Chartiere401d142015-04-22 13:56:20 -0700924 DISALLOW_COPY_AND_ASSIGN(ArtMethod); // Need to use CopyFrom to deal with 32 vs 64 bits.
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800925};
926
// Callback interface invoked when a native method implementation is registered.
// The callback may substitute a different implementation via `new_implementation`.
class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  // `method`: the method being bound; `original_implementation`: the function
  // being registered; `new_implementation`: out-parameter for a replacement.
  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
936
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800937} // namespace art
938
Mathieu Chartiere401d142015-04-22 13:56:20 -0700939#endif // ART_RUNTIME_ART_METHOD_H_