blob: 580ae03e947d9bfcd6e4d2705a2e33f57db208ce [file] [log] [blame]
Elliott Hughes2faa5f12012-01-30 14:42:07 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070016
Brian Carlstrom578bbdc2011-07-21 14:07:47 -070017#include "class_linker.h"
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070018
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070019#include <unistd.h>
20
Alex Lighteb7c1442015-08-31 13:17:42 -070021#include <algorithm>
Brian Carlstromdbc05252011-09-09 01:59:59 -070022#include <deque>
Vladimir Markobf121912019-06-04 13:49:05 +010023#include <forward_list>
Ian Rogerscf7f1912014-10-22 22:06:39 -070024#include <iostream>
Vladimir Marko21300532017-01-24 18:06:55 +000025#include <map>
Ian Rogers700a4022014-05-19 16:49:03 -070026#include <memory>
Fred Shih381e4ca2014-08-25 17:24:27 -070027#include <queue>
Ian Rogers0cfe1fb2011-08-26 03:29:44 -070028#include <string>
Andreas Gampe9f3928f2019-02-04 11:19:31 -080029#include <string_view>
Alex Lighteb7c1442015-08-31 13:17:42 -070030#include <tuple>
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070031#include <utility>
Elliott Hughes90a33692011-08-30 13:27:07 -070032#include <vector>
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070033
Andreas Gampe46ee31b2016-12-14 10:11:49 -080034#include "android-base/stringprintf.h"
35
Mathieu Chartierc7853442015-03-27 14:35:38 -070036#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070037#include "art_method-inl.h"
Vladimir Markobf121912019-06-04 13:49:05 +010038#include "barrier.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070039#include "base/arena_allocator.h"
Elliott Hughes1aa246d2012-12-13 09:29:36 -080040#include "base/casts.h"
Andreas Gampe19f54162019-05-14 16:16:28 -070041#include "base/file_utils.h"
Vladimir Marko782fb712020-12-23 12:47:31 +000042#include "base/hash_map.h"
43#include "base/hash_set.h"
David Sehr67bf42e2018-02-26 16:43:04 -080044#include "base/leb128.h"
Elliott Hughes07ed66b2012-12-12 18:34:25 -080045#include "base/logging.h"
Eric Holk74584e62021-02-18 14:39:17 -080046#include "base/metrics/metrics.h"
Vladimir Markobf121912019-06-04 13:49:05 +010047#include "base/mutex-inl.h"
David Sehrc431b9d2018-03-02 12:01:51 -080048#include "base/os.h"
49#include "base/quasi_atomic.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070050#include "base/scoped_arena_containers.h"
Narayan Kamathd1c606f2014-06-09 16:50:19 +010051#include "base/scoped_flock.h"
Elliott Hughes1aa246d2012-12-13 09:29:36 -080052#include "base/stl_util.h"
Vladimir Markob9c29f62019-03-20 14:22:51 +000053#include "base/string_view_cpp20.h"
Mathieu Chartier32ce2ad2016-03-04 14:58:03 -080054#include "base/systrace.h"
Vladimir Marko80afd022015-05-19 18:08:00 +010055#include "base/time_utils.h"
Elliott Hughes76160052012-12-12 16:31:20 -080056#include "base/unix_file/fd_file.h"
David Sehrc431b9d2018-03-02 12:01:51 -080057#include "base/utils.h"
Andreas Gampeb9aec2c2015-04-23 22:23:47 -070058#include "base/value_object.h"
Mingyao Yang063fc772016-08-02 11:02:54 -070059#include "cha.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080060#include "class_linker-inl.h"
Calin Juravle57d0acc2017-07-11 17:41:30 -070061#include "class_loader_utils.h"
Vladimir Marko5868ada2020-05-12 11:50:34 +010062#include "class_root-inl.h"
Mathieu Chartiere4275c02015-08-06 15:34:15 -070063#include "class_table-inl.h"
Vladimir Marko2b5eaa22013-12-13 13:59:30 +000064#include "compiler_callbacks.h"
Vladimir Marko606adb32018-04-05 14:49:24 +010065#include "debug_print.h"
Elliott Hughes4740cdf2011-12-07 14:07:12 -080066#include "debugger.h"
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -070067#include "dex/class_accessor-inl.h"
David Sehrb2ec9f52018-02-21 13:20:31 -080068#include "dex/descriptors_names.h"
David Sehr9e734c72018-01-04 17:56:19 -080069#include "dex/dex_file-inl.h"
70#include "dex/dex_file_exception_helpers.h"
71#include "dex/dex_file_loader.h"
Andreas Gampead1aa632019-01-02 10:30:54 -080072#include "dex/signature-inl.h"
David Sehr0225f8e2018-01-31 08:52:24 +000073#include "dex/utf.h"
Vladimir Marko5115a4d2019-10-17 14:56:47 +010074#include "entrypoints/entrypoint_utils-inl.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070075#include "entrypoints/runtime_asm_entrypoints.h"
Alex Light705ad492015-09-21 11:36:30 -070076#include "experimental_flags.h"
Ian Rogers1d54e732013-05-02 21:10:01 -070077#include "gc/accounting/card_table-inl.h"
Mathieu Chartier03c1dd92016-03-07 16:13:54 -080078#include "gc/accounting/heap_bitmap-inl.h"
Chang Xingba17dbd2017-06-28 21:27:56 +000079#include "gc/accounting/space_bitmap-inl.h"
Andreas Gampe1c158a02017-07-13 17:26:19 -070080#include "gc/heap-visit-objects-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -070081#include "gc/heap.h"
Mathieu Chartier1b1e31f2016-05-19 10:13:04 -070082#include "gc/scoped_gc_critical_section.h"
Ian Rogers1d54e732013-05-02 21:10:01 -070083#include "gc/space/image_space.h"
Vladimir Marko8d6768d2017-03-14 10:13:21 +000084#include "gc/space/space-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -070085#include "gc_root-inl.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -070086#include "handle_scope-inl.h"
Andreas Gampeaa120012018-03-28 16:23:24 -070087#include "hidden_api.h"
Mathieu Chartier4a26f172016-01-26 14:26:18 -080088#include "image-inl.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070089#include "imt_conflict_table.h"
90#include "imtable-inl.h"
Mathieu Chartier74ccee62018-10-10 10:30:29 -070091#include "intern_table-inl.h"
Ian Rogers64b6d142012-10-29 16:34:15 -070092#include "interpreter/interpreter.h"
Nicolas Geoffray0315efa2020-06-26 11:42:39 +010093#include "interpreter/mterp/nterp.h"
David Srbeckyfb3de3d2018-01-29 16:11:49 +000094#include "jit/debugger_interface.h"
Mathieu Chartiere5f13e52015-02-24 09:37:21 -080095#include "jit/jit.h"
96#include "jit/jit_code_cache.h"
Vladimir Markoa3ad0cd2018-05-04 10:06:38 +010097#include "jni/java_vm_ext.h"
98#include "jni/jni_internal.h"
Mathieu Chartierc7853442015-03-27 14:35:38 -070099#include "linear_alloc.h"
Andreas Gampe8e0f0432018-10-24 13:38:03 -0700100#include "mirror/array-alloc-inl.h"
101#include "mirror/array-inl.h"
Orion Hodsonc069a302017-01-18 09:23:12 +0000102#include "mirror/call_site.h"
Andreas Gampe70f5fd02018-10-24 19:58:37 -0700103#include "mirror/class-alloc-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800104#include "mirror/class-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700105#include "mirror/class.h"
Alex Lightd6251582016-10-31 11:12:30 -0700106#include "mirror/class_ext.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800107#include "mirror/class_loader.h"
Ian Rogers39ebcb82013-05-30 16:57:23 -0700108#include "mirror/dex_cache-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700109#include "mirror/dex_cache.h"
Narayan Kamath000e1882016-10-24 17:14:25 +0100110#include "mirror/emulated_stack_frame.h"
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700111#include "mirror/field.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800112#include "mirror/iftable-inl.h"
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700113#include "mirror/method.h"
Narayan Kamathafa48272016-08-03 12:46:58 +0100114#include "mirror/method_handle_impl.h"
Orion Hodsonc069a302017-01-18 09:23:12 +0000115#include "mirror/method_handles_lookup.h"
Steven Morelande431e272017-07-18 16:53:49 -0700116#include "mirror/method_type.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800117#include "mirror/object-inl.h"
Chang Xingba17dbd2017-06-28 21:27:56 +0000118#include "mirror/object-refvisitor-inl.h"
Alex Lighta9bbc082019-11-14 14:51:41 -0800119#include "mirror/object.h"
Andreas Gampe52ecb652018-10-24 15:18:21 -0700120#include "mirror/object_array-alloc-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700121#include "mirror/object_array-inl.h"
Alex Light133987d2020-03-26 19:22:12 +0000122#include "mirror/object_array.h"
Chris Wailes0c61be42018-09-26 17:27:34 -0700123#include "mirror/object_reference.h"
124#include "mirror/object_reference-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800125#include "mirror/proxy.h"
Fred Shih4ee7a662014-07-11 09:59:27 -0700126#include "mirror/reference-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800127#include "mirror/stack_trace_element.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700128#include "mirror/string-inl.h"
Andreas Gampe501c3b02019-04-17 21:54:27 +0000129#include "mirror/throwable.h"
Orion Hodson005ac512017-10-24 15:43:43 +0100130#include "mirror/var_handle.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700131#include "native/dalvik_system_DexFile.h"
Andreas Gampe373a9b52017-10-18 09:01:57 -0700132#include "nativehelper/scoped_local_ref.h"
Nicolas Geoffrayc39af942021-01-25 08:43:57 +0000133#include "nterp_helpers.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700134#include "oat.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700135#include "oat_file-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700136#include "oat_file.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700137#include "oat_file_assistant.h"
138#include "oat_file_manager.h"
139#include "object_lock.h"
David Sehr82d046e2018-04-23 08:14:19 -0700140#include "profile/profile_compilation_info.h"
Brian Carlstrom1f870082011-08-23 16:02:11 -0700141#include "runtime.h"
Andreas Gampeac30fa22017-01-18 21:02:36 -0800142#include "runtime_callbacks.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -0700143#include "scoped_thread_state_change-inl.h"
Ian Rogers7b078e82014-09-10 14:44:24 -0700144#include "thread-inl.h"
Alex Light133987d2020-03-26 19:22:12 +0000145#include "thread.h"
Mingyao Yang063fc772016-08-02 11:02:54 -0700146#include "thread_list.h"
Mathieu Chartier7778b882015-10-05 16:41:10 -0700147#include "trace.h"
Vladimir Markob68bb7a2020-03-17 10:55:25 +0000148#include "transaction.h"
Nicolas Geoffray6df45112021-02-07 21:51:58 +0000149#include "vdex_file.h"
Andreas Gampea43ba3d2019-03-13 15:49:20 -0700150#include "verifier/class_verifier.h"
Nicolas Geoffray80789962021-04-30 16:50:39 +0100151#include "verifier/verifier_deps.h"
Elliott Hugheseac76672012-05-24 21:56:51 -0700152#include "well_known_classes.h"
Carl Shapiro0e5d75d2011-07-06 18:28:37 -0700153
154namespace art {
155
using android::base::StringPrintf;

// Debug-only consistency toggles; both are enabled only in debug builds.
// NOTE(review): exact checks they gate are defined later in this file — confirm there.
static constexpr bool kCheckImageObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
Mathieu Chartierc7853442015-03-27 14:35:38 -0700160
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700161static void ThrowNoClassDefFoundError(const char* fmt, ...)
162 __attribute__((__format__(__printf__, 1, 2)))
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700163 REQUIRES_SHARED(Locks::mutator_lock_);
Elliott Hughes0512f022012-03-15 22:10:52 -0700164static void ThrowNoClassDefFoundError(const char* fmt, ...) {
Elliott Hughes4a2b4172011-09-20 17:08:25 -0700165 va_list args;
166 va_start(args, fmt);
Ian Rogers62d6c772013-02-27 08:32:07 -0800167 Thread* self = Thread::Current();
Nicolas Geoffray0aa50ce2015-03-10 11:03:29 +0000168 self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
Ian Rogerscab01012012-01-10 17:35:46 -0800169 va_end(args);
170}
171
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100172static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700173 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lightd6251582016-10-31 11:12:30 -0700174 ObjPtr<mirror::ClassExt> ext(c->GetExtData());
175 if (ext == nullptr) {
176 return nullptr;
177 } else {
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100178 return ext->GetErroneousStateError();
Alex Lightd6251582016-10-31 11:12:30 -0700179 }
180}
181
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100182static bool IsVerifyError(ObjPtr<mirror::Object> obj)
Alex Lightd6251582016-10-31 11:12:30 -0700183 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100184 // This is slow, but we only use it for rethrowing an error, and for DCHECK.
185 return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
186}
187
188// Helper for ThrowEarlierClassFailure. Throws the stored error.
189static void HandleEarlierErroneousStateError(Thread* self,
190 ClassLinker* class_linker,
191 ObjPtr<mirror::Class> c)
192 REQUIRES_SHARED(Locks::mutator_lock_) {
193 ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
Andreas Gampe99babb62015-11-02 16:20:00 -0800194 DCHECK(obj != nullptr);
195 self->AssertNoPendingException();
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100196 DCHECK(!obj->IsClass());
197 ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
198 ObjPtr<mirror::Class> error_class = obj->GetClass();
199 CHECK(throwable_class->IsAssignableFrom(error_class));
200 self->SetException(obj->AsThrowable());
Andreas Gampe99babb62015-11-02 16:20:00 -0800201 self->AssertPendingException();
202}
203
Nicolas Geoffray66934ef2021-07-07 14:56:23 +0100204static void UpdateClassAfterVerification(Handle<mirror::Class> klass,
205 PointerSize pointer_size,
206 verifier::FailureKind failure_kind)
Andreas Gampe5b20b352018-10-11 19:03:20 -0700207 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray00391822019-12-10 10:17:23 +0000208 Runtime* runtime = Runtime::Current();
209 ClassLinker* class_linker = runtime->GetClassLinker();
Nicolas Geoffray2990b882021-11-30 15:48:49 +0000210 if (klass->IsVerified() && (failure_kind == verifier::FailureKind::kNoFailure)) {
Andreas Gampe5b20b352018-10-11 19:03:20 -0700211 klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
Nicolas Geoffray66934ef2021-07-07 14:56:23 +0100212 }
213
214 // Now that the class has passed verification, try to set nterp entrypoints
215 // to methods that currently use the switch interpreter.
216 if (interpreter::CanRuntimeUseNterp()) {
217 for (ArtMethod& m : klass->GetMethods(pointer_size)) {
Nicolas Geoffrayc8a694d2022-01-17 17:12:38 +0000218 if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode())) {
219 runtime->GetInstrumentation()->InitializeMethodsCode(&m, /*aot_code=*/nullptr);
220 }
Nicolas Geoffray00391822019-12-10 10:17:23 +0000221 }
Andreas Gampe5b20b352018-10-11 19:03:20 -0700222 }
223}
224
// Callback responsible for making a batch of classes visibly initialized
// after all threads have called it from a checkpoint, ensuring visibility.
// Batches up to `kMaxClasses` classes (held as weak global refs so the GC can
// still collect them) and flips each surviving class to kVisiblyInitialized
// once every thread has passed the checkpoint.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  // True when no classes have been added to the batch yet.
  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  // True when the batch has reached its fixed capacity (kMaxClasses).
  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  // Adds an initialized (but not yet visibly initialized) class to the batch,
  // holding it via a weak global reference.
  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  // Registers a barrier that shall be passed when this callback completes.
  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  // Takes ownership of the registered barriers, leaving the list empty.
  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  // Runs this callback as a checkpoint on all threads; the last thread to
  // pass the checkpoint performs the actual status updates.
  void MakeVisible(Thread* self) {
    DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
    size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
    AdjustThreadVisibilityCounter(self, count);
  }

  // Checkpoint body executed by each thread.
  void Run(Thread* self) override {
    self->ClearMakeVisiblyInitializedCounter();
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      {
        ScopedObjectAccess soa(self);
        StackHandleScope<1u> hs(self);
        MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
        JavaVMExt* vm = self->GetJniEnv()->GetVm();
        for (size_t i = 0, num = num_classes_; i != num; ++i) {
          klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
          vm->DeleteWeakGlobalRef(self, classes_[i]);
          // The weak ref may have been cleared if the class was collected.
          if (klass != nullptr) {
            mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
            class_linker_->FixupStaticTrampolines(self, klass.Get());
          }
        }
        num_classes_ = 0u;
      }
      class_linker_->VisiblyInitializedCallbackDone(self, this);
    }
  }

  static constexpr size_t kMaxClasses = 16;

  ClassLinker* const class_linker_;
  size_t num_classes_;
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};
317
// Flushes the pending batch of initialized classes (if any) by running the
// visibly-initialized checkpoint, and, when `wait` is true, blocks until all
// currently running callbacks (including the newly flushed one) complete.
void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    // Move the non-empty pending callback (if any) onto the running list;
    // it is launched below, outside the lock.
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      // Register our barrier with every running callback so each one passes
      // it on completion; count them so we know how much to wait for.
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}
351
// Completion handler invoked by a VisiblyInitializedCallback once all threads
// have passed its checkpoint: releases waiters, unlinks the callback from the
// running list, and recycles or frees the callback object.
void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  // (Intrusive singly-linked list: walk with a trailing `before` iterator so
  // we can use erase_after.)
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}
376
Alex Lightfb119572019-09-18 15:04:53 -0700377void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
378 ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
379 if (cb != nullptr) {
380 cb->MakeVisible(self);
381 }
382 ScopedThreadSuspension sts(self, ThreadState::kSuspended);
383 MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
384}
385
// Records that `klass` finished initialization. On x86/x86-64 or inside a
// transaction the class becomes visibly initialized immediately and null is
// returned; otherwise the class is queued on the pending callback and, when
// the batch becomes full, that callback is returned for the caller to run
// via MakeVisible().
ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    // Batch is full: hand it to the caller to launch; start a fresh batch lazily.
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}
418
// Registers `native_method` as the JNI implementation of `method`, giving
// runtime callbacks a chance to intercept/replace it. Returns the code that
// was actually installed. For @CriticalNative methods whose declaring class
// is not yet visibly initialized, installation is deferred via the
// critical_native_code_with_clinit_check_ map.
const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  // Callbacks may substitute a different implementation (out-param).
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}
447
448void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
449 CHECK(method->IsNative()) << method->PrettyMethod();
450 // Restore stub to lookup native pointer via dlsym.
451 if (method->IsCriticalNative()) {
452 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
453 auto it = critical_native_code_with_clinit_check_.find(method);
454 if (it != critical_native_code_with_clinit_check_.end()) {
455 critical_native_code_with_clinit_check_.erase(it);
456 }
457 method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
458 } else {
459 method->SetEntryPointFromJni(GetJniDlsymLookupStub());
460 }
461}
462
463const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
464 if (method->IsCriticalNative()) {
465 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
466 auto it = critical_native_code_with_clinit_check_.find(method);
467 if (it != critical_native_code_with_clinit_check_.end()) {
468 return it->second;
469 }
470 const void* native_code = method->GetEntryPointFromJni();
471 return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
472 } else {
473 const void* native_code = method->GetEntryPointFromJni();
474 return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
475 }
476}
477
// Throws the appropriate exception for a class `c` that already failed a
// previous verification/initialization attempt: either the stored error
// (re-thrown), a NoClassDefFoundError, or both (NCDFE wrapping the stored
// error as its cause, controlled by `wrap_in_no_class_def`). `log` controls
// the informational log line at runtime.
void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}
524
Brian Carlstromb23eab12014-10-08 17:55:21 -0700525static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700526 REQUIRES_SHARED(Locks::mutator_lock_) {
Brian Carlstromb23eab12014-10-08 17:55:21 -0700527 if (VLOG_IS_ON(class_linker)) {
528 std::string temp;
529 LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000530 << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
Brian Carlstromb23eab12014-10-08 17:55:21 -0700531 }
532}
533
// Converts the pending exception from a failed <clinit> of `klass` into the
// form required by the JLS: Errors are rethrown as-is, any other throwable is
// wrapped in an ExceptionInInitializerError (with the original as cause).
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();
  JNIEnv* env = self->GetJniEnv();

  ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
  CHECK(cause.get() != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need to
    // make sure to only do it if we don't have AsyncExceptions being thrown around since those
    // could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  // Clear the pending exception so the instanceof check below can run, then
  // re-throw the original cause before (possibly) wrapping it.
  env->ExceptionClear();
  bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
  env->Throw(cause.get());

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!is_error) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}
565
Andreas Gampe87658f32019-04-18 18:39:02 +0000566ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
Andreas Gampe2af99022017-04-25 08:32:59 -0700567 : boot_class_table_(new ClassTable()),
568 failed_dex_cache_class_lookups_(0),
Ian Rogers98379392014-02-24 16:53:16 -0800569 class_roots_(nullptr),
Ian Rogers98379392014-02-24 16:53:16 -0800570 find_array_class_cache_next_victim_(0),
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700571 init_done_(false),
Vladimir Marko1998cd02017-01-13 13:02:58 +0000572 log_new_roots_(false),
Jeff Hao0aba0ba2013-06-03 14:49:28 -0700573 intern_table_(intern_table),
Andreas Gampe87658f32019-04-18 18:39:02 +0000574 fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
Vladimir Marko7dac8642019-11-06 17:09:30 +0000575 jni_dlsym_lookup_trampoline_(nullptr),
Vladimir Markofa458ac2020-02-12 14:08:07 +0000576 jni_dlsym_lookup_critical_trampoline_(nullptr),
Ian Rogers98379392014-02-24 16:53:16 -0800577 quick_resolution_trampoline_(nullptr),
Andreas Gampe2da88232014-02-27 12:26:20 -0800578 quick_imt_conflict_trampoline_(nullptr),
Vladimir Marko8a630572014-04-09 18:45:35 +0100579 quick_generic_jni_trampoline_(nullptr),
Mathieu Chartier2d721012014-11-10 11:08:06 -0800580 quick_to_interpreter_bridge_trampoline_(nullptr),
Nicolas Geoffrayc39af942021-01-25 08:43:57 +0000581 nterp_trampoline_(nullptr),
Andreas Gampec1ac9ee2017-07-24 22:35:49 -0700582 image_pointer_size_(kRuntimePointerSize),
Vladimir Markobf121912019-06-04 13:49:05 +0100583 visibly_initialized_callback_lock_("visibly initialized callback lock"),
584 visibly_initialized_callback_(nullptr),
Vladimir Marko86c87522020-05-11 16:55:55 +0100585 critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
586 critical_native_code_with_clinit_check_(),
Andreas Gampe7dface32017-07-25 21:32:59 -0700587 cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
588 // For CHA disabled during Aot, see b/34193647.
589
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700590 CHECK(intern_table_ != nullptr);
Andreas Gampe8ac75952015-06-02 21:01:45 -0700591 static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
592 "Array cache size wrong.");
593 std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -0700594}
Brian Carlstroma663ea52011-08-19 23:33:41 -0700595
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800596void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -0700597 ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800598 if (c2 == nullptr) {
599 LOG(FATAL) << "Could not find class " << descriptor;
600 UNREACHABLE();
601 }
602 if (c1.Get() != c2) {
603 std::ostringstream os1, os2;
604 c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
605 c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
606 LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
607 << ". This is most likely the result of a broken build. Make sure that "
608 << "libcore and art projects match.\n\n"
609 << os1.str() << "\n\n" << os2.str();
610 UNREACHABLE();
611 }
612}
613
Vladimir Marko78f62d82022-01-10 16:25:19 +0000614ObjPtr<mirror::IfTable> AllocIfTable(Thread* self,
615 size_t ifcount,
616 ObjPtr<mirror::Class> iftable_class)
617 REQUIRES_SHARED(Locks::mutator_lock_) {
618 DCHECK(iftable_class->IsArrayClass());
619 DCHECK(iftable_class->GetComponentType()->IsObjectClass());
620 return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
621 mirror::IfTable::Alloc(self, iftable_class, ifcount * mirror::IfTable::kMax)));
622}
623
Andreas Gampe3db9c5d2015-11-17 11:52:46 -0800624bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
625 std::string* error_msg) {
Brian Carlstroma004aa92012-02-08 18:05:09 -0800626 VLOG(startup) << "ClassLinker::Init";
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -0700627
Mathieu Chartiere401d142015-04-22 13:56:20 -0700628 Thread* const self = Thread::Current();
629 Runtime* const runtime = Runtime::Current();
630 gc::Heap* const heap = runtime->GetHeap();
631
Jeff Haodcdc85b2015-12-04 14:06:18 -0800632 CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
Elliott Hughesd8ddfd52011-08-15 14:32:53 -0700633 CHECK(!init_done_);
Brian Carlstrom578bbdc2011-07-21 14:07:47 -0700634
Mathieu Chartiere401d142015-04-22 13:56:20 -0700635 // Use the pointer size from the runtime since we are probably creating the image.
636 image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());
637
Elliott Hughes30646832011-10-13 16:59:46 -0700638 // java_lang_Class comes first, it's needed for AllocClass
Mathieu Chartier590fee92013-09-13 13:46:47 -0700639 // The GC can't handle an object with a null class since we can't get the size of this object.
Mathieu Chartier1d27b342014-01-28 12:51:09 -0800640 heap->IncrementDisableMovingGC(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700641 StackHandleScope<64> hs(self); // 64 is picked arbitrarily.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700642 auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
Mathieu Chartierd7a7f2f2018-09-07 11:57:18 -0700643 // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
644 // the incorrect result when comparing to-space vs from-space.
Vladimir Markod7e9bbf2019-03-28 13:18:57 +0000645 Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
Vladimir Marko991cd5c2019-05-30 14:23:39 +0100646 heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800647 CHECK(java_lang_Class != nullptr);
Vladimir Marko317892b2018-05-31 11:11:32 +0100648 java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700649 java_lang_Class->SetClass(java_lang_Class.Get());
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -0700650 if (kUseBakerReadBarrier) {
651 java_lang_Class->AssertReadBarrierState();
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -0800652 }
Mathieu Chartiere401d142015-04-22 13:56:20 -0700653 java_lang_Class->SetClassSize(class_class_size);
Hiroshi Yamauchif0edfc32014-09-25 11:46:46 -0700654 java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
Mathieu Chartier1d27b342014-01-28 12:51:09 -0800655 heap->DecrementDisableMovingGC(self);
Mathieu Chartier28357fa2016-10-18 16:27:40 -0700656 // AllocClass(ObjPtr<mirror::Class>) can now be used
Brian Carlstroma0808032011-07-18 00:39:23 -0700657
Elliott Hughes418d20f2011-09-22 14:00:39 -0700658 // Class[] is used for reflection support.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700659 auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700660 Handle<mirror::Class> class_array_class(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700661 AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700662 class_array_class->SetComponentType(java_lang_Class.Get());
Elliott Hughes418d20f2011-09-22 14:00:39 -0700663
Ian Rogers23435d02012-09-24 11:23:12 -0700664 // java_lang_Object comes next so that object_array_class can be created.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700665 Handle<mirror::Class> java_lang_Object(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700666 AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800667 CHECK(java_lang_Object != nullptr);
Ian Rogers23435d02012-09-24 11:23:12 -0700668 // backfill Object as the super class of Class.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700669 java_lang_Class->SetSuperClass(java_lang_Object.Get());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000670 mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);
Brian Carlstroma0808032011-07-18 00:39:23 -0700671
Mathieu Chartier673ed3d2015-08-28 14:56:43 -0700672 java_lang_Object->SetObjectSize(sizeof(mirror::Object));
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -0800673 // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
674 // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
Vladimir Marko991cd5c2019-05-30 14:23:39 +0100675 runtime->SetSentinel(heap->AllocNonMovableObject(self,
676 java_lang_Object.Get(),
677 java_lang_Object->GetObjectSize(),
678 VoidFunctor()));
Mathieu Chartier673ed3d2015-08-28 14:56:43 -0700679
Igor Murashkin86083f72017-10-27 10:59:04 -0700680 // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
Vladimir Marko305c38b2018-02-14 11:50:07 +0000681 if (kBitstringSubtypeCheckEnabled) {
Igor Murashkin86083f72017-10-27 10:59:04 -0700682 // It might seem the lock here is unnecessary, however all the SubtypeCheck
683 // functions are annotated to require locks all the way down.
684 //
685 // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
686 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
Vladimir Marko38b8b252018-01-02 19:07:06 +0000687 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
688 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
Igor Murashkin86083f72017-10-27 10:59:04 -0700689 }
690
Ian Rogers23435d02012-09-24 11:23:12 -0700691 // Object[] next to hold class roots.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700692 Handle<mirror::Class> object_array_class(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700693 AllocClass(self, java_lang_Class.Get(),
694 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700695 object_array_class->SetComponentType(java_lang_Object.Get());
Brian Carlstroma0808032011-07-18 00:39:23 -0700696
Roland Levillain0e840272018-08-23 19:55:30 +0100697 // Setup java.lang.String.
698 //
699 // We make this class non-movable for the unlikely case where it were to be
700 // moved by a sticky-bit (minor) collection when using the Generational
701 // Concurrent Copying (CC) collector, potentially creating a stale reference
702 // in the `klass_` field of one of its instances allocated in the Large-Object
703 // Space (LOS) -- see the comment about the dirty card scanning logic in
704 // art::gc::collector::ConcurrentCopying::MarkingPhase.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700705 Handle<mirror::Class> java_lang_String(hs.NewHandle(
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700706 AllocClass</* kMovable= */ false>(
Roland Levillain0e840272018-08-23 19:55:30 +0100707 self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700708 java_lang_String->SetStringClass();
Vladimir Marko2c64a832018-01-04 11:31:56 +0000709 mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);
Jesse Wilson14150742011-07-29 19:04:44 -0400710
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700711 // Setup java.lang.ref.Reference.
Fred Shih4ee7a662014-07-11 09:59:27 -0700712 Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700713 AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
Fred Shih4ee7a662014-07-11 09:59:27 -0700714 java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000715 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);
Fred Shih4ee7a662014-07-11 09:59:27 -0700716
Ian Rogers23435d02012-09-24 11:23:12 -0700717 // Create storage for root classes, save away our work so far (requires descriptors).
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700718 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100719 mirror::ObjectArray<mirror::Class>::Alloc(self,
720 object_array_class.Get(),
721 static_cast<int32_t>(ClassRoot::kMax)));
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700722 CHECK(!class_roots_.IsNull());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100723 SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
724 SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
725 SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
726 SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100727 SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
728 SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700729
Mathieu Chartier6beced42016-11-15 15:51:31 -0800730 // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
Vladimir Marko78f62d82022-01-10 16:25:19 +0000731 java_lang_Object->SetIfTable(AllocIfTable(self, 0, object_array_class.Get()));
Mathieu Chartier6beced42016-11-15 15:51:31 -0800732
Vladimir Marko02610552018-06-04 14:38:00 +0100733 // Create array interface entries to populate once we can load system classes.
Vladimir Marko78f62d82022-01-10 16:25:19 +0000734 object_array_class->SetIfTable(AllocIfTable(self, 2, object_array_class.Get()));
Vladimir Marko02610552018-06-04 14:38:00 +0100735 DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());
736
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700737 // Setup the primitive type classes.
Vladimir Marko70e2a762019-07-12 16:49:00 +0100738 CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
739 CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
740 CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
741 CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
742 CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
743 CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
744 CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
745 CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
746 CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700747
Vladimir Marko70e2a762019-07-12 16:49:00 +0100748 // Allocate the primitive array classes. We need only the native pointer
749 // array at this point (int[] or long[], depending on architecture) but
750 // we shall perform the same setup steps for all primitive array classes.
751 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
752 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
753 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
754 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
755 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
756 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
757 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
758 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700759
Brian Carlstrom75cb3b42011-07-28 02:13:36 -0700760 // now that these are registered, we can use AllocClass() and AllocObjectArray
Brian Carlstroma0808032011-07-18 00:39:23 -0700761
Ian Rogers52813c92012-10-11 11:50:38 -0700762 // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700763 Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700764 AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100765 SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
Vladimir Marko05792b92015-08-03 11:56:49 +0100766 java_lang_DexCache->SetDexCacheClass();
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700767 java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000768 mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700769
Alex Lightd6251582016-10-31 11:12:30 -0700770
771 // Setup dalvik.system.ClassExt
772 Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
773 AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100774 SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000775 mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);
Alex Lightd6251582016-10-31 11:12:30 -0700776
Mathieu Chartier66f19252012-09-18 08:57:04 -0700777 // Set up array classes for string, field, method
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700778 Handle<mirror::Class> object_array_string(hs.NewHandle(
779 AllocClass(self, java_lang_Class.Get(),
Mathieu Chartiere401d142015-04-22 13:56:20 -0700780 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700781 object_array_string->SetComponentType(java_lang_String.Get());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100782 SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());
Mathieu Chartier66f19252012-09-18 08:57:04 -0700783
Nicolas Geoffray796d6302016-03-13 22:22:31 +0000784 LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700785 // Create runtime resolution and imt conflict methods.
786 runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
Nicolas Geoffray796d6302016-03-13 22:22:31 +0000787 runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
788 runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));
Ian Rogers4445a7e2012-10-05 17:19:13 -0700789
Ian Rogers23435d02012-09-24 11:23:12 -0700790 // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
791 // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
792 // these roots.
Andreas Gampe3db9c5d2015-11-17 11:52:46 -0800793 if (boot_class_path.empty()) {
794 *error_msg = "Boot classpath is empty.";
795 return false;
796 }
Richard Uhlerfbef44d2014-12-23 09:48:51 -0800797 for (auto& dex_file : boot_class_path) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -0800798 if (dex_file == nullptr) {
Andreas Gampe3db9c5d2015-11-17 11:52:46 -0800799 *error_msg = "Null dex file.";
800 return false;
801 }
Mathieu Chartier0a19e212019-11-27 14:35:24 -0800802 AppendToBootClassPath(self, dex_file.get());
Mathieu Chartierfbc31082016-01-24 11:59:56 -0800803 boot_dex_files_.push_back(std::move(dex_file));
Mathieu Chartier66f19252012-09-18 08:57:04 -0700804 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700805
806 // now we can use FindSystemClass
807
Dmitry Petrochenkof0972a42014-05-16 17:43:39 +0700808 // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
809 // we do not need friend classes or a publicly exposed setter.
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700810 quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800811 if (!runtime->IsAotCompiler()) {
Alex Light64ad14d2014-08-19 14:23:13 -0700812 // We need to set up the generic trampolines since we don't have an image.
Vladimir Marko7dac8642019-11-06 17:09:30 +0000813 jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
Vladimir Markofa458ac2020-02-12 14:08:07 +0000814 jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700815 quick_resolution_trampoline_ = GetQuickResolutionStub();
816 quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
Vladimir Marko7dac8642019-11-06 17:09:30 +0000817 quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700818 quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
Nicolas Geoffrayc39af942021-01-25 08:43:57 +0000819 nterp_trampoline_ = interpreter::GetNterpEntryPoint();
Alex Light64ad14d2014-08-19 14:23:13 -0700820 }
Dmitry Petrochenkof0972a42014-05-16 17:43:39 +0700821
Alex Lightd6251582016-10-31 11:12:30 -0700822 // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
Vladimir Marko2c64a832018-01-04 11:31:56 +0000823 mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800824 CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700825 CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000826 mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800827 CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
Vladimir Marko2c64a832018-01-04 11:31:56 +0000828 mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800829 CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700830 CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000831 mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
Alex Lightd6251582016-10-31 11:12:30 -0700832 CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
833 CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700834
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800835 // Run Class through FindSystemClass. This initializes the dex_cache_ fields and register it
836 // in class_table_.
837 CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
Elliott Hughes418d20f2011-09-22 14:00:39 -0700838
Vladimir Marko70e2a762019-07-12 16:49:00 +0100839 // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
840 // arrays - can't be done until Object has a vtable and component classes are loaded.
841 FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
842 FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
843 FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
844 FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
845 FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
846 FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
847 FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
848 FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
849 FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
850 FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
851 FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700852
Ian Rogers23435d02012-09-24 11:23:12 -0700853 // Setup the single, global copy of "iftable".
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700854 auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800855 CHECK(java_lang_Cloneable != nullptr);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700856 auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800857 CHECK(java_io_Serializable != nullptr);
Ian Rogers23435d02012-09-24 11:23:12 -0700858 // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
859 // crawl up and explicitly list all of the supers as well.
Vladimir Marko02610552018-06-04 14:38:00 +0100860 object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
861 object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700862
Vladimir Markob10668c2021-06-10 09:52:53 +0100863 // Check Class[] and Object[]'s interfaces.
864 CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
865 CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
866 CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
867 CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700868
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700869 CHECK_EQ(object_array_string.Get(),
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100870 FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
Brian Carlstrom1f870082011-08-23 16:02:11 -0700871
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800872 // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.
Ian Rogers466bb252011-10-14 03:29:56 -0700873
Ian Rogers23435d02012-09-24 11:23:12 -0700874 // Create java.lang.reflect.Proxy root.
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100875 SetClassRoot(ClassRoot::kJavaLangReflectProxy,
876 FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
Ian Rogers466bb252011-10-14 03:29:56 -0700877
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700878 // Create java.lang.reflect.Field.class root.
Vladimir Markoacb906d2018-05-30 10:23:49 +0100879 ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700880 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100881 SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700882
883 // Create java.lang.reflect.Field array root.
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700884 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
885 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100886 SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700887
888 // Create java.lang.reflect.Constructor.class root and array root.
889 class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
890 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100891 SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700892 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
893 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100894 SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700895
896 // Create java.lang.reflect.Method.class root and array root.
897 class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
898 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100899 SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700900 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
901 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100902 SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700903
Orion Hodson005ac512017-10-24 15:43:43 +0100904 // Create java.lang.invoke.CallSite.class root
905 class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
906 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100907 SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100908
Narayan Kamathafa48272016-08-03 12:46:58 +0100909 // Create java.lang.invoke.MethodType.class root
910 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
911 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100912 SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
Narayan Kamathafa48272016-08-03 12:46:58 +0100913
914 // Create java.lang.invoke.MethodHandleImpl.class root
915 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
916 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100917 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
Vladimir Markoc7aa87e2018-05-24 15:19:52 +0100918 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
Narayan Kamathafa48272016-08-03 12:46:58 +0100919
Orion Hodsonc069a302017-01-18 09:23:12 +0000920 // Create java.lang.invoke.MethodHandles.Lookup.class root
921 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
922 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100923 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +0000924
Orion Hodson005ac512017-10-24 15:43:43 +0100925 // Create java.lang.invoke.VarHandle.class root
926 class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
Orion Hodsonc069a302017-01-18 09:23:12 +0000927 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100928 SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100929
930 // Create java.lang.invoke.FieldVarHandle.class root
931 class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
932 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100933 SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100934
Orion Hodsondd411962021-06-25 08:55:22 +0100935 // Create java.lang.invoke.StaticFieldVarHandle.class root
936 class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
937 CHECK(class_root != nullptr);
938 SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);
939
Orion Hodson005ac512017-10-24 15:43:43 +0100940 // Create java.lang.invoke.ArrayElementVarHandle.class root
941 class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
942 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100943 SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100944
945 // Create java.lang.invoke.ByteArrayViewVarHandle.class root
946 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
947 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100948 SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100949
950 // Create java.lang.invoke.ByteBufferViewVarHandle.class root
951 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
952 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100953 SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +0000954
Narayan Kamath000e1882016-10-24 17:14:25 +0100955 class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
956 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100957 SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
Narayan Kamath000e1882016-10-24 17:14:25 +0100958
Brian Carlstrom1f870082011-08-23 16:02:11 -0700959 // java.lang.ref classes need to be specially flagged, but otherwise are normal classes
Fred Shih4ee7a662014-07-11 09:59:27 -0700960 // finish initializing Reference class
Vladimir Marko2c64a832018-01-04 11:31:56 +0000961 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800962 CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
Fred Shih4ee7a662014-07-11 09:59:27 -0700963 CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700964 CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
965 mirror::Reference::ClassSize(image_pointer_size_));
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700966 class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700967 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700968 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700969 class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700970 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700971 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700972 class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700973 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700974 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700975 class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700976 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700977 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
Brian Carlstrom1f870082011-08-23 16:02:11 -0700978
Ian Rogers23435d02012-09-24 11:23:12 -0700979 // Setup the ClassLoader, verifying the object_size_.
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700980 class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
Mathieu Chartiere4275c02015-08-06 15:34:15 -0700981 class_root->SetClassLoaderClass();
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700982 CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100983 SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700984
jeffhao8cd6dda2012-02-22 10:15:34 -0800985 // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
Ian Rogers23435d02012-09-24 11:23:12 -0700986 // java.lang.StackTraceElement as a convenience.
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100987 SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100988 SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
Brian Carlstromf3632832014-05-20 15:36:53 -0700989 FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100990 SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
991 FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
992 SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
Brian Carlstromf3632832014-05-20 15:36:53 -0700993 FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +0000994 SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
995 FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
Elliott Hughesd8ddfd52011-08-15 14:32:53 -0700996
Mathieu Chartiercdca4762016-04-28 09:44:54 -0700997 // Create conflict tables that depend on the class linker.
998 runtime->FixupConflictTables();
999
Ian Rogers98379392014-02-24 16:53:16 -08001000 FinishInit(self);
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001001
Brian Carlstroma004aa92012-02-08 18:05:09 -08001002 VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001003
1004 return true;
Brian Carlstroma663ea52011-08-19 23:33:41 -07001005}
1006
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001007static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1008 REQUIRES_SHARED(Locks::mutator_lock_) {
1009 // Find String.<init> -> StringFactory bindings.
1010 ObjPtr<mirror::Class> string_factory_class =
1011 class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1012 CHECK(string_factory_class != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001013 ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001014 WellKnownClasses::InitStringInit(string_class, string_factory_class);
1015 // Update the primordial thread.
1016 self->InitStringEntryPoints();
1017}
1018
// Completes class-linker initialization once the core classes exist:
// binds String.<init> to StringFactory, validates the hard-coded java.lang.ref
// instance-field layout, checks that every class root is populated, and flips
// `init_done_` to enable the fast paths in FindClass/CreatePrimitiveClass.
void ClassLinker::FinishInit(Thread* self) {
  VLOG(startup) << "ClassLinker::FinishInit entering";

  CreateStringInitBindings(self, this);

  // Let the heap know some key offsets into java.lang.ref instances
  // Note: we hard code the field indexes here rather than using FindInstanceField
  // as the types of the field can't be resolved prior to the runtime being
  // fully initialized
  // Three handle slots: the two classes below plus `soe_klass` near the end.
  StackHandleScope<3> hs(self);
  Handle<mirror::Class> java_lang_ref_Reference =
      hs.NewHandle(GetClassRoot<mirror::Reference>(this));
  Handle<mirror::Class> java_lang_ref_FinalizerReference =
      hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));

  // Verify each field sits at its hard-coded index with the expected name/type.
  ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
  CHECK_STREQ(pendingNext->GetName(), "pendingNext");
  CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
  CHECK_STREQ(queue->GetName(), "queue");
  CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");

  ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
  CHECK_STREQ(queueNext->GetName(), "queueNext");
  CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
  CHECK_STREQ(referent->GetName(), "referent");
  CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");

  ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
  CHECK_STREQ(zombie->GetName(), "zombie");
  CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");

  // ensure all class_roots_ are initialized
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
    ClassRoot class_root = static_cast<ClassRoot>(i);
    ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
    CHECK(klass != nullptr);
    DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
    // note SetClassRoot does additional validation.
    // if possible add new checks there to catch errors early
  }

  CHECK(GetArrayIfTable() != nullptr);

  // disable the slow paths in FindClass and CreatePrimitiveClass now
  // that Object, Class, and Object[] are setup
  init_done_ = true;

  // Under sanitization, the small carve-out to handle stack overflow might not be enough to
  // initialize the StackOverflowError class (as it might require running the verifier). Instead,
  // ensure that the class will be initialized.
  if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
    verifier::ClassVerifier::Init(this);  // Need to prepare the verifier.

    ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
    if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
      // Strange, but don't crash.
      LOG(WARNING) << "Could not prepare StackOverflowError.";
      self->ClearException();
    }
  }

  VLOG(startup) << "ClassLinker::FinishInit exiting";
}
1086
Vladimir Markodcfcce42018-06-27 10:00:28 +00001087void ClassLinker::RunRootClinits(Thread* self) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001088 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1089 ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
Elliott Hughes2a20cfd2011-09-23 19:30:41 -07001090 if (!c->IsArrayClass() && !c->IsPrimitive()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001091 StackHandleScope<1> hs(self);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001092 Handle<mirror::Class> h_class(hs.NewHandle(c));
David Srbecky08110ef2020-05-20 19:33:43 +01001093 if (!EnsureInitialized(self, h_class, true, true)) {
1094 LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1095 << ": " << self->GetException()->Dump();
1096 }
Vladimir Markodcfcce42018-06-27 10:00:28 +00001097 } else {
1098 DCHECK(c->IsInitialized());
Elliott Hughes2a20cfd2011-09-23 19:30:41 -07001099 }
1100 }
1101}
1102
Vladimir Marko8670e042021-12-21 17:55:48 +00001103ALWAYS_INLINE
1104static uint32_t ComputeMethodHash(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
1105 DCHECK(!method->IsRuntimeMethod());
1106 DCHECK(!method->IsProxyMethod());
1107 DCHECK(!method->IsObsolete());
1108 // Do not use `ArtMethod::GetNameView()` to avoid unnecessary runtime/proxy/obsolete method
1109 // checks. It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1110 const DexFile& dex_file = method->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1111 const dex::MethodId& method_id = dex_file.GetMethodId(method->GetDexMethodIndex());
1112 std::string_view name = dex_file.GetMethodNameView(method_id);
1113 return ComputeModifiedUtf8Hash(name);
1114}
1115
Vladimir Markobed84ef2022-01-21 13:57:14 +00001116ALWAYS_INLINE
1117static bool MethodSignatureEquals(ArtMethod* lhs, ArtMethod* rhs)
1118 REQUIRES_SHARED(Locks::mutator_lock_) {
1119 DCHECK(!lhs->IsRuntimeMethod());
1120 DCHECK(!lhs->IsProxyMethod());
1121 DCHECK(!lhs->IsObsolete());
1122 DCHECK(!rhs->IsRuntimeMethod());
1123 DCHECK(!rhs->IsProxyMethod());
1124 DCHECK(!rhs->IsObsolete());
1125 // Do not use `ArtMethod::GetDexFile()` to avoid unnecessary obsolete method checks.
1126 // It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1127 const DexFile& lhs_dex_file = lhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1128 const DexFile& rhs_dex_file = rhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1129 const dex::MethodId& lhs_mid = lhs_dex_file.GetMethodId(lhs->GetDexMethodIndex());
1130 const dex::MethodId& rhs_mid = rhs_dex_file.GetMethodId(rhs->GetDexMethodIndex());
1131 if (&lhs_dex_file == &rhs_dex_file) {
1132 return lhs_mid.name_idx_ == rhs_mid.name_idx_ &&
1133 lhs_mid.proto_idx_ == rhs_mid.proto_idx_;
1134 } else {
1135 return
1136 lhs_dex_file.GetMethodNameView(lhs_mid) == rhs_dex_file.GetMethodNameView(rhs_mid) &&
1137 lhs_dex_file.GetMethodSignature(lhs_mid) == rhs_dex_file.GetMethodSignature(rhs_mid);
1138 }
1139}
1140
Vladimir Marko43354742021-02-03 15:37:01 +00001141static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1142 PointerSize pointer_size,
1143 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1144 REQUIRES_SHARED(Locks::mutator_lock_) {
1145 ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1146 DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1147 for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
Vladimir Marko8670e042021-12-21 17:55:48 +00001148 virtual_method_hashes[i] = ComputeMethodHash(&virtual_methods[i]);
Vladimir Marko43354742021-02-03 15:37:01 +00001149 }
1150}
1151
// Plain data bundle for the debug-only heap walk in `ClassLinker::InitFromBootImage()`
// that verifies no ArtMethod entrypoint aliases a secondary image's trampolines.
struct TrampolineCheckData {
  // Trampoline entrypoints to compare method entrypoints against.
  const void* quick_resolution_trampoline;
  const void* quick_imt_conflict_trampoline;
  const void* quick_generic_jni_trampoline;
  const void* quick_to_interpreter_bridge_trampoline;
  const void* nterp_trampoline;
  // Pointer size used to read entrypoints from ArtMethods.
  PointerSize pointer_size;
  // Out: the offending method when `error` is set by the visitor.
  ArtMethod* m;
  // Out: true if a method with a bad entrypoint was found.
  bool error;
};
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001162
// Initializes the class linker from a previously compiled boot image instead of
// compiling the core classes at startup: validates the image pointer size,
// adopts the image's runtime methods and oat trampolines, restores the class
// roots and the JNI-weak-cleared sentinel, registers each image space's dex
// files on the boot class path, and finishes with FinishInit().
// Returns false and sets `*error_msg` on failure.
bool ClassLinker::InitFromBootImage(std::string* error_msg) {
  VLOG(startup) << __FUNCTION__ << " entering";
  CHECK(!init_done_);

  Runtime* const runtime = Runtime::Current();
  Thread* const self = Thread::Current();
  gc::Heap* const heap = runtime->GetHeap();
  std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
  CHECK(!spaces.empty());
  // The primary image header (spaces[0]) is authoritative for pointer size and roots.
  const ImageHeader& image_header = spaces[0]->GetImageHeader();
  uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
  if (!ValidPointerSize(pointer_size_unchecked)) {
    *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
    return false;
  }
  image_pointer_size_ = image_header.GetPointerSize();
  if (!runtime->IsAotCompiler()) {
    // Only the Aot compiler supports having an image with a different pointer size than the
    // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
    // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
    if (image_pointer_size_ != kRuntimePointerSize) {
      *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
                                static_cast<size_t>(image_pointer_size_),
                                sizeof(void*));
      return false;
    }
  }
  // Adopt the special runtime methods (resolution, IMT conflict, callee-save
  // frames) stored in the image instead of creating fresh ones.
  DCHECK(!runtime->HasResolutionMethod());
  runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
  runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
  runtime->SetImtUnimplementedMethod(
      image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
      CalleeSaveType::kSaveAllCalleeSaves);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
      CalleeSaveType::kSaveRefsOnly);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
      CalleeSaveType::kSaveRefsAndArgs);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
      CalleeSaveType::kSaveEverything);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
      CalleeSaveType::kSaveEverythingForClinit);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
      CalleeSaveType::kSaveEverythingForSuspendCheck);

  // Cache the trampoline entrypoints from the first oat file; all images are
  // expected to share one set (verified below in debug builds).
  std::vector<const OatFile*> oat_files =
      runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
  DCHECK(!oat_files.empty());
  const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
  jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
  jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
  quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
  quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
  quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
  quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
  nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
  if (kIsDebugBuild) {
    // Check that the other images use the same trampoline.
    for (size_t i = 1; i < oat_files.size(); ++i) {
      const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
      const void* ith_jni_dlsym_lookup_trampoline_ =
          ith_oat_header.GetJniDlsymLookupTrampoline();
      const void* ith_jni_dlsym_lookup_critical_trampoline_ =
          ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
      const void* ith_quick_resolution_trampoline =
          ith_oat_header.GetQuickResolutionTrampoline();
      const void* ith_quick_imt_conflict_trampoline =
          ith_oat_header.GetQuickImtConflictTrampoline();
      const void* ith_quick_generic_jni_trampoline =
          ith_oat_header.GetQuickGenericJniTrampoline();
      const void* ith_quick_to_interpreter_bridge_trampoline =
          ith_oat_header.GetQuickToInterpreterBridge();
      const void* ith_nterp_trampoline =
          ith_oat_header.GetNterpTrampoline();
      if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
          ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
          ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
          ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
          ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
          ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
          ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
          ith_nterp_trampoline != nterp_trampoline_) {
        // Make sure that all methods in this image do not contain those trampolines as
        // entrypoints. Otherwise the class-linker won't be able to work with a single set.
        TrampolineCheckData data;
        data.error = false;
        data.pointer_size = GetImagePointerSize();
        data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
        data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
        data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
        data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
        data.nterp_trampoline = ith_nterp_trampoline;
        ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
        // Walk every live object; flag any class method whose entrypoint is one
        // of this (differing) image's trampolines.
        auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
          if (obj->IsClass()) {
            ObjPtr<mirror::Class> klass = obj->AsClass();
            for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
              const void* entrypoint =
                  m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
              if (entrypoint == data.quick_resolution_trampoline ||
                  entrypoint == data.quick_imt_conflict_trampoline ||
                  entrypoint == data.quick_generic_jni_trampoline ||
                  entrypoint == data.quick_to_interpreter_bridge_trampoline) {
                data.m = &m;
                data.error = true;
                return;
              }
            }
          }
        };
        spaces[i]->GetLiveBitmap()->Walk(visitor);
        if (data.error) {
          ArtMethod* m = data.m;
          LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
          *error_msg = "Found an ArtMethod with a bad entrypoint";
          return false;
        }
      }
    }
  }

  // Restore the class roots array from the image.
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kClassRoots)));
  DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);

  DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
  // Reuse the image's sentinel object for cleared JNI weak globals.
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
  runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
  DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));

  // Add each image space and collect its dex files onto the boot class path.
  for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
    // Boot class loader, use a null handle.
    std::vector<std::unique_ptr<const DexFile>> dex_files;
    if (!AddImageSpace(spaces[i],
                       ScopedNullHandle<mirror::ClassLoader>(),
                       /*out*/&dex_files,
                       error_msg)) {
      return false;
    }
    // Append opened dex files at the end.
    boot_dex_files_.insert(boot_dex_files_.end(),
                           std::make_move_iterator(dex_files.begin()),
                           std::make_move_iterator(dex_files.end()));
  }
  for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
    OatDexFile::MadviseDexFileAtLoad(*dex_file);
  }
  InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
                                      image_pointer_size_,
                                      ArrayRef<uint32_t>(object_virtual_method_hashes_));
  FinishInit(self);

  VLOG(startup) << __FUNCTION__ << " exiting";
  return true;
}
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001326
Vladimir Marko4433c432018-12-04 14:57:47 +00001327void ClassLinker::AddExtraBootDexFiles(
1328 Thread* self,
1329 std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1330 for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08001331 AppendToBootClassPath(self, dex_file.get());
Orion Hodson771708f2021-01-06 15:45:16 +00001332 if (kIsDebugBuild) {
1333 for (const auto& boot_dex_file : boot_dex_files_) {
1334 DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1335 }
1336 }
Vladimir Marko4433c432018-12-04 14:57:47 +00001337 boot_dex_files_.push_back(std::move(dex_file));
1338 }
1339}
1340
Jeff Hao5872d7c2016-04-27 11:07:41 -07001341bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001342 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001343 return class_loader == nullptr ||
Mathieu Chartier0795f232016-09-27 18:43:30 -07001344 soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
1345 class_loader->GetClass();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001346}
1347
// Visitor run over the classes of a class loader that is being deleted.
// Notifies Class Hierarchy Analysis (CHA) so that single-implementation
// information derived from the soon-to-be-unloaded classes is reset.
class CHAOnDeleteUpdateClassVisitor {
 public:
  // `alloc` is the linear allocator associated with the dying class loader.
  explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
      : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
        pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
        self_(Thread::Current()) {}

  // Returns true to continue visiting remaining classes.
  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    // This class is going to be unloaded. Tell CHA about it.
    cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
    return true;
  }
 private:
  const LinearAlloc* allocator_;
  const ClassHierarchyAnalysis* cha_;
  const PointerSize pointer_size_;
  // NOTE(review): captured at construction but not read in this class — confirm if still needed.
  const Thread* self_;
};
1366
Chris Wailes0c61be42018-09-26 17:27:34 -07001367/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001368 * A class used to ensure that all references to strings interned in an AppImage have been
1369 * properly recorded in the interned references list, and is only ever run in debug mode.
Chris Wailes0c61be42018-09-26 17:27:34 -07001370 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001371class CountInternedStringReferencesVisitor {
Chang Xingba17dbd2017-06-28 21:27:56 +00001372 public:
Vladimir Marko8e05f092019-06-10 11:10:38 +01001373 CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
1374 const InternTable::UnorderedSet& image_interns)
1375 : space_(space),
1376 image_interns_(image_interns),
1377 count_(0u) {}
Chris Wailes0c61be42018-09-26 17:27:34 -07001378
Chris Wailes0c61be42018-09-26 17:27:34 -07001379 void TestObject(ObjPtr<mirror::Object> referred_obj) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001380 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001381 if (referred_obj != nullptr &&
1382 space_.HasAddress(referred_obj.Ptr()) &&
1383 referred_obj->IsString()) {
1384 ObjPtr<mirror::String> referred_str = referred_obj->AsString();
Vladimir Marko365c0202022-03-22 09:53:31 +00001385 uint32_t hash = static_cast<uint32_t>(referred_str->GetStoredHashCode());
1386 // All image strings have the hash code calculated, even if they are not interned.
1387 DCHECK_EQ(hash, static_cast<uint32_t>(referred_str->ComputeHashCode()));
1388 auto it = image_interns_.FindWithHash(GcRoot<mirror::String>(referred_str), hash);
Vladimir Marko8e05f092019-06-10 11:10:38 +01001389 if (it != image_interns_.end() && it->Read() == referred_str) {
1390 ++count_;
Chris Wailesfbeef462018-10-19 14:16:35 -07001391 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001392 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001393 }
1394
Chris Wailes0c61be42018-09-26 17:27:34 -07001395 void VisitRootIfNonNull(
Chang Xingba17dbd2017-06-28 21:27:56 +00001396 mirror::CompressedReference<mirror::Object>* root) const
1397 REQUIRES_SHARED(Locks::mutator_lock_) {
1398 if (!root->IsNull()) {
1399 VisitRoot(root);
1400 }
1401 }
1402
Chris Wailes0c61be42018-09-26 17:27:34 -07001403 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001404 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001405 TestObject(root->AsMirrorPtr());
Chang Xingba17dbd2017-06-28 21:27:56 +00001406 }
1407
1408 // Visit Class Fields
Chris Wailes0c61be42018-09-26 17:27:34 -07001409 void operator()(ObjPtr<mirror::Object> obj,
1410 MemberOffset offset,
1411 bool is_static ATTRIBUTE_UNUSED) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001412 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001413 // References within image or across images don't need a read barrier.
1414 ObjPtr<mirror::Object> referred_obj =
1415 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1416 TestObject(referred_obj);
Chang Xingba17dbd2017-06-28 21:27:56 +00001417 }
1418
1419 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1420 ObjPtr<mirror::Reference> ref) const
1421 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001422 operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
Chang Xingba17dbd2017-06-28 21:27:56 +00001423 }
1424
Vladimir Marko8e05f092019-06-10 11:10:38 +01001425 size_t GetCount() const {
1426 return count_;
1427 }
1428
1429 private:
Chris Wailes0c61be42018-09-26 17:27:34 -07001430 const gc::space::ImageSpace& space_;
Vladimir Marko8e05f092019-06-10 11:10:38 +01001431 const InternTable::UnorderedSet& image_interns_;
1432 mutable size_t count_; // Modified from the `const` callbacks.
Chang Xingba17dbd2017-06-28 21:27:56 +00001433};
1434
Chris Wailes0c61be42018-09-26 17:27:34 -07001435/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001436 * This function counts references to strings interned in the AppImage.
1437 * This is used in debug build to check against the number of the recorded references.
Chris Wailes0c61be42018-09-26 17:27:34 -07001438 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001439size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1440 const InternTable::UnorderedSet& image_interns)
1441 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001442 const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1443 const ImageHeader& image_header = space.GetImageHeader();
1444 const uint8_t* target_base = space.GetMemMap()->Begin();
1445 const ImageSection& objects_section = image_header.GetObjectsSection();
Chris Wailesfbeef462018-10-19 14:16:35 -07001446
1447 auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1448 auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
Chris Wailes0c61be42018-09-26 17:27:34 -07001449
Vladimir Marko8e05f092019-06-10 11:10:38 +01001450 CountInternedStringReferencesVisitor visitor(space, image_interns);
Chris Wailes0c61be42018-09-26 17:27:34 -07001451 bitmap->VisitMarkedRange(objects_begin,
1452 objects_end,
1453 [&space, &visitor](mirror::Object* obj)
1454 REQUIRES_SHARED(Locks::mutator_lock_) {
1455 if (space.HasAddress(obj)) {
1456 if (obj->IsDexCache()) {
Chris Wailesfbeef462018-10-19 14:16:35 -07001457 obj->VisitReferences</* kVisitNativeRoots= */ true,
1458 kVerifyNone,
1459 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001460 } else {
1461 // Don't visit native roots for non-dex-cache as they can't contain
1462 // native references to strings. This is verified during compilation
1463 // by ImageWriter::VerifyNativeGCRootInvariants.
Chris Wailesfbeef462018-10-19 14:16:35 -07001464 obj->VisitReferences</* kVisitNativeRoots= */ false,
1465 kVerifyNone,
1466 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001467 }
1468 }
1469 });
Vladimir Marko8e05f092019-06-10 11:10:38 +01001470 return visitor.GetCount();
1471}
1472
1473template <typename Visitor>
1474static void VisitInternedStringReferences(
1475 gc::space::ImageSpace* space,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001476 const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1477 const uint8_t* target_base = space->Begin();
1478 const ImageSection& sro_section =
1479 space->GetImageHeader().GetImageStringReferenceOffsetsSection();
1480 const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
1481
1482 VLOG(image)
1483 << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
1484 << num_string_offsets;
1485
1486 const auto* sro_base =
1487 reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
1488
1489 for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
1490 uint32_t base_offset = sro_base[offset_index].first;
1491
David Srbecky86d6cd52020-12-02 18:13:10 +00001492 uint32_t raw_member_offset = sro_base[offset_index].second;
1493 DCHECK_ALIGNED(base_offset, 2);
1494 DCHECK_ALIGNED(raw_member_offset, 2);
Vladimir Marko8e05f092019-06-10 11:10:38 +01001495
David Srbecky86d6cd52020-12-02 18:13:10 +00001496 ObjPtr<mirror::Object> obj_ptr =
1497 reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
1498 MemberOffset member_offset(raw_member_offset);
1499 ObjPtr<mirror::String> referred_string =
1500 obj_ptr->GetFieldObject<mirror::String,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001501 kVerifyNone,
David Srbecky86d6cd52020-12-02 18:13:10 +00001502 kWithoutReadBarrier,
1503 /* kIsVolatile= */ false>(member_offset);
1504 DCHECK(referred_string != nullptr);
1505
1506 ObjPtr<mirror::String> visited = visitor(referred_string);
1507 if (visited != referred_string) {
1508 obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1509 /* kCheckTransaction= */ false,
1510 kVerifyNone,
1511 /* kIsVolatile= */ false>(member_offset, visited);
Vladimir Marko8e05f092019-06-10 11:10:38 +01001512 }
1513 }
1514}
1515
1516static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1517 REQUIRES_SHARED(Locks::mutator_lock_) {
1518 InternTable::UnorderedSet image_interns;
1519 const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1520 if (section.Size() > 0) {
1521 size_t read_count;
1522 const uint8_t* data = space->Begin() + section.Offset();
1523 InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1524 image_set.swap(image_interns);
1525 }
1526 size_t num_recorded_refs = 0u;
1527 VisitInternedStringReferences(
1528 space,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001529 [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1530 REQUIRES_SHARED(Locks::mutator_lock_) {
1531 auto it = image_interns.find(GcRoot<mirror::String>(str));
1532 CHECK(it != image_interns.end());
1533 CHECK(it->Read() == str);
1534 ++num_recorded_refs;
1535 return str;
1536 });
1537 size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1538 CHECK_EQ(num_recorded_refs, num_found_refs);
Chris Wailes0c61be42018-09-26 17:27:34 -07001539}
1540
// Helper for fixing up a freshly loaded app image: registers its dex caches
// with the class loader and merges its interned strings into the runtime's
// intern table.
// Note: using a class here to avoid having to make ClassLinker internals public.
class AppImageLoadingHelper {
 public:
  // Performs all app-image fixups for `space`: registers the image's dex
  // caches under `class_loader` and, when app images may contain strings,
  // interns/remaps the image's string references.
  static void Update(
      ClassLinker* class_linker,
      gc::space::ImageSpace* space,
      Handle<mirror::ClassLoader> class_loader,
      Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
      REQUIRES(!Locks::dex_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Interns the strings referenced from the app image; references that
  // conflict with strings already in the runtime intern table are rewritten
  // to point at the existing strings.
  static void HandleAppImageStrings(gc::space::ImageSpace* space)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
1557
// Fixes up a newly mapped app image: verifies recorded string references (in
// debug builds), registers the image's dex caches with `class_loader`, interns
// the image's strings, and optionally verifies ArtMethod declaring classes.
void AppImageLoadingHelper::Update(
    ClassLinker* class_linker,
    gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
    REQUIRES(!Locks::dex_lock_)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedTrace app_image_timing("AppImage:Updating");

  if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
    // In debug build, verify the string references before applying
    // the Runtime::LoadAppImageStartupCache() option.
    VerifyInternedStringReferences(space);
  }

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();
  const ImageHeader& header = space->GetImageHeader();
  {
    // Register dex caches with the class loader.
    // Note the nesting: dex_lock_ is taken inside classlinker_classes_lock_
    // for each dex cache, and each dex file must not already be registered.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
      const DexFile* const dex_file = dex_cache->GetDexFile();
      {
        WriterMutexLock mu2(self, *Locks::dex_lock_);
        CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
        class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
      }
    }
  }

  if (ClassLinker::kAppImageMayContainStrings) {
    // Merge image interned strings into the runtime table, remapping
    // conflicting references inside the image.
    HandleAppImageStrings(space);
  }

  if (kVerifyArtMethodDeclaringClasses) {
    // Debug verification: every image method's declaring class must be marked
    // live in the heap bitmap.
    ScopedTrace timing("AppImage:VerifyDeclaringClasses");
    ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
    gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
    header.VisitPackedArtMethods([&](ArtMethod& method)
        REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
      ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
      if (klass != nullptr) {
        CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
      }
    }, space->Begin(), kRuntimePointerSize);
  }
}
1607
// Merges the app image's interned strings into the runtime's intern table.
// Strings already present in the runtime table are treated as conflicts: they
// are removed from the image's set and every image reference to them is
// rewritten to point at the existing runtime string.
void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
  // Iterate over the string reference offsets stored in the image and intern
  // the strings they point to.
  ScopedTrace timing("AppImage:InternString");

  Runtime* const runtime = Runtime::Current();
  InternTable* const intern_table = runtime->GetInternTable();

  // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
  // for faster lookup.
  // TODO: Optimize with a bitmap or bloom filter
  SafeMap<mirror::String*, mirror::String*> intern_remap;
  // Invoked by AddImageStringsToTable() with the image's string set while
  // intern_table_lock_ is held.
  auto func = [&](InternTable::UnorderedSet& interns)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::intern_table_lock_) {
    const size_t non_boot_image_strings = intern_table->CountInterns(
        /*visit_boot_images=*/false,
        /*visit_non_boot_images=*/true);
    VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
    VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
    // Visit the smaller of the two sets to compute the intersection.
    if (interns.size() < non_boot_image_strings) {
      // Probe the runtime table for each image string; erase conflicts from
      // the image set so only new strings remain to be added.
      for (auto it = interns.begin(); it != interns.end(); ) {
        ObjPtr<mirror::String> string = it->Read();
        ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
        if (existing == nullptr) {
          existing = intern_table->LookupStrongLocked(string);
        }
        if (existing != nullptr) {
          intern_remap.Put(string.Ptr(), existing.Ptr());
          it = interns.erase(it);
        } else {
          ++it;
        }
      }
    } else {
      // Fewer runtime strings than image strings: walk the runtime table and
      // probe the image set instead.
      intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
          REQUIRES_SHARED(Locks::mutator_lock_)
          REQUIRES(Locks::intern_table_lock_) {
        auto it = interns.find(root);
        if (it != interns.end()) {
          ObjPtr<mirror::String> existing = root.Read();
          intern_remap.Put(it->Read(), existing.Ptr());
          it = interns.erase(it);
        }
      }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
    }
    // Consistency check to ensure correctness.
    if (kIsDebugBuild) {
      // Every string left in the image set must be genuinely new to the
      // runtime intern table.
      for (GcRoot<mirror::String>& root : interns) {
        ObjPtr<mirror::String> string = root.Read();
        CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
        CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
      }
    }
  };
  intern_table->AddImageStringsToTable(space, func);
  if (!intern_remap.empty()) {
    // Rewrite image references to conflicting strings so they point at the
    // canonical runtime-interned instances.
    VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
    VisitInternedStringReferences(
        space,
        [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
          auto it = intern_remap.find(str.Ptr());
          if (it != intern_remap.end()) {
            return ObjPtr<mirror::String>(it->second);
          }
          return str;
        });
  }
}
1678
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001679static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1680 const char* location,
1681 std::string* error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001682 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001683 DCHECK(error_msg != nullptr);
1684 std::unique_ptr<const DexFile> dex_file;
Andreas Gampeb40d3612018-06-26 15:49:42 -07001685 const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001686 if (oat_dex_file == nullptr) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001687 return std::unique_ptr<const DexFile>();
1688 }
1689 std::string inner_error_msg;
1690 dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1691 if (dex_file == nullptr) {
1692 *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1693 location,
1694 oat_file->GetLocation().c_str(),
1695 inner_error_msg.c_str());
1696 return std::unique_ptr<const DexFile>();
1697 }
1698
1699 if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1700 *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1701 location,
1702 dex_file->GetLocationChecksum(),
1703 oat_dex_file->GetDexFileLocationChecksum());
1704 return std::unique_ptr<const DexFile>();
1705 }
1706 return dex_file;
1707}
1708
1709bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1710 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1711 std::string* error_msg) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07001712 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001713 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001714 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001715 DCHECK(dex_caches_object != nullptr);
Vladimir Marko4617d582019-03-28 13:48:31 +00001716 ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001717 dex_caches_object->AsObjectArray<mirror::DexCache>();
1718 const OatFile* oat_file = space->GetOatFile();
Alex Lighta9bbc082019-11-14 14:51:41 -08001719 for (auto dex_cache : dex_caches->Iterate()) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001720 std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1721 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1722 dex_file_location.c_str(),
1723 error_msg);
1724 if (dex_file == nullptr) {
1725 return false;
1726 }
1727 dex_cache->SetDexFile(dex_file.get());
1728 out_dex_files->push_back(std::move(dex_file));
1729 }
1730 return true;
1731}
1732
// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
// together and caches some intermediate results.
class ImageChecker final {
 public:
  // Walks every heap object and validates classes: field declaring classes,
  // and that every method reachable from vtables/IMTs/iftables lives inside
  // a boot image methods section.
  static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ImageChecker ic(heap, class_linker);
    auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(obj != nullptr);
      CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
      CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
      if (obj->IsClass()) {
        auto klass = obj->AsClass();
        // Instance and static fields must be declared by this class.
        for (ArtField& field : klass->GetIFields()) {
          CHECK_EQ(field.GetDeclaringClass(), klass);
        }
        for (ArtField& field : klass->GetSFields()) {
          CHECK_EQ(field.GetDeclaringClass(), klass);
        }
        const PointerSize pointer_size = ic.pointer_size_;
        for (ArtMethod& m : klass->GetMethods(pointer_size)) {
          ic.CheckArtMethod(&m, klass);
        }
        // Methods reached through dispatch tables may belong to other
        // classes, so no expected declaring class is passed below.
        ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
        if (vtable != nullptr) {
          ic.CheckArtMethodPointerArray(vtable, nullptr);
        }
        if (klass->ShouldHaveImt()) {
          ImTable* imt = klass->GetImt(pointer_size);
          for (size_t i = 0; i < ImTable::kSize; ++i) {
            ic.CheckArtMethod(imt->Get(i, pointer_size), nullptr);
          }
        }
        if (klass->ShouldHaveEmbeddedVTable()) {
          for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
            ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
          }
        }
        ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            ic.CheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
          }
        }
      }
    };
    heap->VisitObjects(visitor);
  }

 private:
  // Caches the boot image spaces' begin addresses and (runtime) method
  // sections so each per-method containment check is a simple range test.
  ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
      : spaces_(heap->GetBootImageSpaces()),
        pointer_size_(class_linker->GetImagePointerSize()) {
    space_begin_.reserve(spaces_.size());
    method_sections_.reserve(spaces_.size());
    runtime_method_sections_.reserve(spaces_.size());
    for (gc::space::ImageSpace* space : spaces_) {
      space_begin_.push_back(space->Begin());
      auto& header = space->GetImageHeader();
      method_sections_.push_back(&header.GetMethodsSection());
      runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
    }
  }

  // Checks one method: its declaring class matches expectations (null for
  // runtime methods, non-null for copied methods, `expected_class` when
  // given) and its address falls within some boot image method section.
  void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (m->IsRuntimeMethod()) {
      ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
      CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
    } else if (m->IsCopied()) {
      CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
    } else if (expected_class != nullptr) {
      CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
    }
    if (!spaces_.empty()) {
      bool contains = false;
      for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
        const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
        contains = method_sections_[i]->Contains(offset) ||
            runtime_method_sections_[i]->Contains(offset);
      }
      CHECK(contains) << m << " not found";
    }
  }

  // Checks every method in a pointer array (e.g. a vtable or an iftable
  // method array) via CheckArtMethod().
  void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
                                  ObjPtr<mirror::Class> expected_class)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(arr != nullptr);
    for (int32_t j = 0; j < arr->GetLength(); ++j) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
      // expected_class == null means we are a dex cache.
      if (expected_class != nullptr) {
        CHECK(method != nullptr);
      }
      if (method != nullptr) {
        CheckArtMethod(method, expected_class);
      }
    }
  }

  const std::vector<gc::space::ImageSpace*>& spaces_;
  const PointerSize pointer_size_;

  // Cached sections from the spaces.
  std::vector<const uint8_t*> space_begin_;
  std::vector<const ImageSection*> method_sections_;
  std::vector<const ImageSection*> runtime_method_sections_;
};
1842
// Debug verification for a loaded app image: every non-boot-image declaring
// class of an image method must be present in `class_table`, and every direct
// interface of the classes loaded by `class_loader` must be resolved.
static void VerifyAppImage(const ImageHeader& header,
                           const Handle<mirror::ClassLoader>& class_loader,
                           ClassTable* class_table,
                           gc::space::ImageSpace* space)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
    ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
    if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
      // The class table lookup by descriptor must yield this exact class.
      CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
          << mirror::Class::PrettyClass(klass);
    }
  }, space->Begin(), kRuntimePointerSize);
  {
    // Verify that all direct interfaces of classes in the class table are also resolved.
    std::vector<ObjPtr<mirror::Class>> classes;
    // Collect this loader's non-primitive classes first, then check them
    // outside the table visit.
    auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
        classes.push_back(klass);
      }
      return true;
    };
    class_table->Visit(verify_direct_interfaces_in_table);
    for (ObjPtr<mirror::Class> klass : classes) {
      for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
        CHECK(klass->GetDirectInterface(i) != nullptr)
            << klass->PrettyDescriptor() << " iface #" << i;
      }
    }
  }
}
1874
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001875bool ClassLinker::AddImageSpace(
1876 gc::space::ImageSpace* space,
1877 Handle<mirror::ClassLoader> class_loader,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001878 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1879 std::string* error_msg) {
1880 DCHECK(out_dex_files != nullptr);
1881 DCHECK(error_msg != nullptr);
1882 const uint64_t start_time = NanoTime();
Andreas Gampefa4333d2017-02-14 11:10:34 -08001883 const bool app_image = class_loader != nullptr;
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001884 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001885 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001886 DCHECK(dex_caches_object != nullptr);
1887 Runtime* const runtime = Runtime::Current();
1888 gc::Heap* const heap = runtime->GetHeap();
1889 Thread* const self = Thread::Current();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001890 // Check that the image is what we are expecting.
1891 if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
1892 *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
1893 static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
1894 image_pointer_size_);
1895 return false;
1896 }
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001897 size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
1898 if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
1899 *error_msg = StringPrintf("Expected %zu image roots but got %d",
1900 expected_image_roots,
1901 header.GetImageRoots()->GetLength());
1902 return false;
1903 }
1904 StackHandleScope<3> hs(self);
1905 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
1906 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
1907 Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
1908 header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001909 MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
Vladimir Markof75613c2018-06-05 12:51:04 +01001910 app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
1911 : nullptr));
Andreas Gampefa4333d2017-02-14 11:10:34 -08001912 DCHECK(class_roots != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001913 if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001914 *error_msg = StringPrintf("Expected %d class roots but got %d",
1915 class_roots->GetLength(),
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001916 static_cast<int32_t>(ClassRoot::kMax));
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001917 return false;
1918 }
1919 // Check against existing class roots to make sure they match the ones in the boot image.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001920 ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
1921 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
1922 if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001923 *error_msg = "App image class roots must have pointer equality with runtime ones.";
1924 return false;
1925 }
1926 }
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001927 const OatFile* oat_file = space->GetOatFile();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001928 if (oat_file->GetOatHeader().GetDexFileCount() !=
1929 static_cast<uint32_t>(dex_caches->GetLength())) {
1930 *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
1931 "image";
1932 return false;
1933 }
1934
Alex Lighta9bbc082019-11-14 14:51:41 -08001935 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
David Brazdil3e8aae02019-03-26 18:48:02 +00001936 std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001937 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1938 dex_file_location.c_str(),
1939 error_msg);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001940 if (dex_file == nullptr) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001941 return false;
1942 }
1943
David Srbecky86d6cd52020-12-02 18:13:10 +00001944 {
David Srbecky33df0e32021-09-30 14:36:32 +00001945 // Native fields are all null. Initialize them.
David Srbecky86d6cd52020-12-02 18:13:10 +00001946 WriterMutexLock mu(self, *Locks::dex_lock_);
David Srbecky33df0e32021-09-30 14:36:32 +00001947 dex_cache->Initialize(dex_file.get(), class_loader.Get());
David Srbecky86d6cd52020-12-02 18:13:10 +00001948 }
1949 if (!app_image) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001950 // Register dex files, keep track of existing ones that are conflicts.
Mathieu Chartier0a19e212019-11-27 14:35:24 -08001951 AppendToBootClassPath(dex_file.get(), dex_cache);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001952 }
1953 out_dex_files->push_back(std::move(dex_file));
1954 }
1955
1956 if (app_image) {
1957 ScopedObjectAccessUnchecked soa(Thread::Current());
Nicolas Geoffrayf0d30022018-11-20 17:45:38 +00001958 ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001959 if (IsBootClassLoader(soa, image_class_loader.Get())) {
1960 *error_msg = "Unexpected BootClassLoader in app image";
1961 return false;
1962 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001963 }
1964
Orion Hodson5880c772020-07-28 20:12:08 +01001965 if (kCheckImageObjects) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001966 if (!app_image) {
Orion Hodson5880c772020-07-28 20:12:08 +01001967 ImageChecker::CheckObjects(heap, this);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001968 }
1969 }
1970
1971 // Set entry point to interpreter if in InterpretOnly mode.
1972 if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001973 // Set image methods' entry point to interpreter.
1974 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1975 if (!method.IsRuntimeMethod()) {
1976 DCHECK(method.GetDeclaringClass() != nullptr);
Ulya Trafimovich5439f052020-07-29 10:03:46 +01001977 if (!method.IsNative() && !method.IsResolutionMethod()) {
1978 method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
1979 image_pointer_size_);
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001980 }
1981 }
1982 }, space->Begin(), image_pointer_size_);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001983 }
1984
Nicolas Geoffray47171752020-08-31 15:03:20 +01001985 if (!runtime->IsAotCompiler()) {
Nicolas Geoffraybd728b02021-01-27 13:21:35 +00001986 ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
Nicolas Geoffray47171752020-08-31 15:03:20 +01001987 bool can_use_nterp = interpreter::CanRuntimeUseNterp();
Nicolas Geoffray58f916c2021-11-15 14:02:07 +00001988 uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
Nicolas Geoffray7e2c9632020-01-09 13:41:10 +00001989 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray47171752020-08-31 15:03:20 +01001990 // In the image, the `data` pointer field of the ArtMethod contains the code
1991 // item offset. Change this to the actual pointer to the code item.
1992 if (method.HasCodeItem()) {
1993 const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
1994 reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
zhaoxuyang7156ea22022-01-10 13:58:11 +08001995 method.SetCodeItem(code_item, method.GetDexFile()->IsCompactDexFile());
Nicolas Geoffray61673dc2021-11-06 13:58:31 +00001996 // The hotness counter may have changed since we compiled the image, so
1997 // reset it with the runtime value.
Nicolas Geoffray58f916c2021-11-15 14:02:07 +00001998 method.ResetCounter(hotness_threshold);
Nicolas Geoffray47171752020-08-31 15:03:20 +01001999 }
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00002000 if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
2001 if (can_use_nterp) {
Nicolas Geoffrayc8a694d2022-01-17 17:12:38 +00002002 // Set image methods' entry point that point to the nterp trampoline to the
2003 // nterp entry point. This allows taking the fast path when doing a
2004 // nterp->nterp call.
Santiago Aboy Solanes6cdabe12022-02-18 15:27:43 +00002005 DCHECK_IMPLIES(NeedsClinitCheckBeforeCall(&method),
2006 method.GetDeclaringClass()->IsVisiblyInitialized());
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00002007 method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
2008 } else {
2009 method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
2010 }
Nicolas Geoffray47171752020-08-31 15:03:20 +01002011 }
Nicolas Geoffray7e2c9632020-01-09 13:41:10 +00002012 }, space->Begin(), image_pointer_size_);
2013 }
2014
Nicolas Geoffray8c41a0b2020-02-06 16:52:11 +00002015 if (runtime->IsVerificationSoftFail()) {
2016 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2017 if (!method.IsNative() && method.IsInvokable()) {
2018 method.ClearSkipAccessChecks();
2019 }
2020 }, space->Begin(), image_pointer_size_);
2021 }
2022
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002023 ClassTable* class_table = nullptr;
2024 {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002025 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002026 class_table = InsertClassTableForClassLoader(class_loader.Get());
Mathieu Chartier69731002016-03-02 16:08:31 -08002027 }
2028 // If we have a class table section, read it and use it for verification in
2029 // UpdateAppImageClassLoadersAndDexCaches.
2030 ClassTable::ClassSet temp_set;
Vladimir Marko0f3c7002017-09-07 14:15:56 +01002031 const ImageSection& class_table_section = header.GetClassTableSection();
Mathieu Chartier69731002016-03-02 16:08:31 -08002032 const bool added_class_table = class_table_section.Size() > 0u;
2033 if (added_class_table) {
2034 const uint64_t start_time2 = NanoTime();
2035 size_t read_count = 0;
2036 temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2037 /*make copy*/false,
2038 &read_count);
Mathieu Chartier69731002016-03-02 16:08:31 -08002039 VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002040 }
2041 if (app_image) {
David Srbecky86d6cd52020-12-02 18:13:10 +00002042 AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
Mathieu Chartier456b4922018-11-06 10:35:48 -08002043
2044 {
2045 ScopedTrace trace("AppImage:UpdateClassLoaders");
2046 // Update class loader and resolved strings. If added_class_table is false, the resolved
      // strings were forwarded by UpdateAppImageClassLoadersAndDexCaches.
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002048 ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
Mathieu Chartier456b4922018-11-06 10:35:48 -08002049 for (const ClassTable::TableSlot& root : temp_set) {
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002050 // Note: We probably don't need the read barrier unless we copy the app image objects into
2051 // the region space.
2052 ObjPtr<mirror::Class> klass(root.Read());
2053 // Do not update class loader for boot image classes where the app image
2054 // class loader is only the initiating loader but not the defining loader.
2055 // Avoid read barrier since we are comparing against null.
2056 if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
Vladimir Markob68bb7a2020-03-17 10:55:25 +00002057 klass->SetClassLoader(loader);
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002058 }
Mathieu Chartier456b4922018-11-06 10:35:48 -08002059 }
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002060 }
Igor Murashkin86083f72017-10-27 10:59:04 -07002061
Vladimir Marko305c38b2018-02-14 11:50:07 +00002062 if (kBitstringSubtypeCheckEnabled) {
Igor Murashkin86083f72017-10-27 10:59:04 -07002063 // Every class in the app image has initially SubtypeCheckInfo in the
2064 // Uninitialized state.
2065 //
2066 // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
2067 // after class initialization is complete. The app image ClassStatus as-is
2068 // are almost all ClassStatus::Initialized, and being in the
2069 // SubtypeCheckInfo::kUninitialized state is violating that invariant.
2070 //
      // Force every app image class's SubtypeCheck to be at least kInitialized.
2072 //
2073 // See also ImageWriter::FixupClass.
Chris Wailes23866362018-08-22 16:16:58 -07002074 ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
Igor Murashkin86083f72017-10-27 10:59:04 -07002075 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2076 for (const ClassTable::TableSlot& root : temp_set) {
Vladimir Marko38b8b252018-01-02 19:07:06 +00002077 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
Igor Murashkin86083f72017-10-27 10:59:04 -07002078 }
2079 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00002080 }
2081 if (!oat_file->GetBssGcRoots().empty()) {
2082 // Insert oat file to class table for visiting .bss GC roots.
2083 class_table->InsertOatFile(oat_file);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002084 }
Igor Murashkin86083f72017-10-27 10:59:04 -07002085
Mathieu Chartier69731002016-03-02 16:08:31 -08002086 if (added_class_table) {
2087 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2088 class_table->AddClassSet(std::move(temp_set));
2089 }
Andreas Gampebe7af222017-07-25 09:57:28 -07002090
Mathieu Chartier69731002016-03-02 16:08:31 -08002091 if (kIsDebugBuild && app_image) {
2092 // This verification needs to happen after the classes have been added to the class loader.
2093 // Since it ensures classes are in the class table.
Chris Wailes23866362018-08-22 16:16:58 -07002094 ScopedTrace trace("AppImage:Verify");
David Srbecky86d6cd52020-12-02 18:13:10 +00002095 VerifyAppImage(header, class_loader, class_table, space);
Mathieu Chartier69731002016-03-02 16:08:31 -08002096 }
Andreas Gampebe7af222017-07-25 09:57:28 -07002097
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002098 VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08002099 return true;
Brian Carlstroma663ea52011-08-19 23:33:41 -07002100}
2101
// Visits class-related GC roots: the boot class table, class loaders (when
// tracing or requested), and the logs of newly added class / .bss roots.
// Which sets are visited is controlled by `flags`; new-root logging is only
// meaningful for non-read-barrier (non-CC) collectors.
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
  // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
  // enabling tracing requires the mutator lock, there are no race conditions here.
  const bool tracing_enabled = Trace::IsTracingEnabled();
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  if (kUseReadBarrier) {
    // We do not track new roots for CC.
    DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
                          kVisitRootFlagClearRootLog |
                          kVisitRootFlagStartLoggingNewRoots |
                          kVisitRootFlagStopLoggingNewRoots));
  }
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    // Argument for how root visiting deals with ArtField and ArtMethod roots.
    // There are 3 GC cases to handle:
    // Non moving concurrent:
    // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
    // live by the class and class roots.
    //
    // Moving non-concurrent:
    // This case needs to call visit VisitNativeRoots in case the classes or dex cache arrays move.
    // To prevent missing roots, this case needs to ensure that there is no
    // suspend points between the point which we allocate ArtMethod arrays and place them in a
    // class which is in the class table.
    //
    // Moving concurrent:
    // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
    // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
    //
    // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
    // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
    // these objects.
    UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
    boot_class_table_->VisitRoots(root_visitor);
    // If tracing is enabled, then mark all the class loaders to prevent unloading.
    if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
      for (const ClassLoaderData& data : class_loaders_) {
        GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
        root.VisitRoot(visitor, RootInfo(kRootVMInternal));
      }
    }
  } else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
    // Visit only the classes recorded since logging started.
    for (auto& root : new_class_roots_) {
      ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootStickyClass));
      ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
      // Concurrent moving GC marked new roots through the to-space invariant.
      CHECK_EQ(new_ref, old_ref);
    }
    // Also visit the .bss GC roots of oat files added since logging started.
    for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
      for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
        ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
        if (old_ref != nullptr) {
          DCHECK(old_ref->IsClass());
          root.VisitRoot(visitor, RootInfo(kRootStickyClass));
          ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
          // Concurrent moving GC marked new roots through the to-space invariant.
          CHECK_EQ(new_ref, old_ref);
        }
      }
    }
  }
  if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
    new_class_roots_.clear();
    new_bss_roots_boot_oat_files_.clear();
  }
  if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // We deliberately ignore the class roots in the image since we
  // handle image roots by using the MS/CMS rescanning of dirty cards.
}
2177
// Keep in sync with InitCallback. Anything we visit, we need to
// reinit references to when reinitializing a ClassLinker from a
// mapped image.
//
// Visits all ClassLinker-owned GC roots: the class roots array plus
// everything covered by VisitClassRoots.
void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  VisitClassRoots(visitor, flags);
  // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
  // unloading if we are marking roots.
  DropFindArrayClassCache();
}
2188
// ClassLoaderVisitor that applies a wrapped ClassVisitor to every class whose
// defining loader is the visited class loader. Visiting stops early (for all
// subsequently visited loaders) once the wrapped visitor returns false.
class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
      : visitor_(visitor),
        done_(false) {}

  void Visit(ObjPtr<mirror::ClassLoader> class_loader)
      REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
    ClassTable* const class_table = class_loader->GetClassTable();
    // A loader may have no class table yet; skip it. Once done_ is set we
    // skip all remaining loaders.
    if (!done_ && class_table != nullptr) {
      DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
      if (!class_table->Visit(visitor)) {
        // If the visitor ClassTable returns false it means that we don't need to continue.
        done_ = true;
      }
    }
  }

 private:
  // Class visitor that limits the class visits from a ClassTable to the classes with
  // the provided defining class loader. This filter is used to avoid multiple visits
  // of the same class which can be recorded for multiple initiating class loaders.
  class DefiningClassLoaderFilterVisitor : public ClassVisitor {
   public:
    DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
                                     ClassVisitor* visitor)
        : defining_class_loader_(defining_class_loader), visitor_(visitor) { }

    bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (klass->GetClassLoader() != defining_class_loader_) {
        // Entry exists only because this loader initiated loading; the class
        // will be visited via its defining loader's table instead.
        return true;
      }
      return (*visitor_)(klass);
    }

    const ObjPtr<mirror::ClassLoader> defining_class_loader_;
    ClassVisitor* const visitor_;
  };

  ClassVisitor* const visitor_;
  // If done is true then we don't need to do any more visiting.
  bool done_;
};
2232
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002233void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
Andreas Gampe2af99022017-04-25 08:32:59 -07002234 if (boot_class_table_->Visit(*visitor)) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002235 VisitClassLoaderClassesVisitor loader_visitor(visitor);
2236 VisitClassLoaders(&loader_visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002237 }
2238}
2239
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002240void ClassLinker::VisitClasses(ClassVisitor* visitor) {
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002241 Thread* const self = Thread::Current();
2242 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2243 // Not safe to have thread suspension when we are holding a lock.
2244 if (self != nullptr) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002245 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002246 VisitClassesInternal(visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002247 } else {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002248 VisitClassesInternal(visitor);
Elliott Hughesa2155262011-11-16 16:26:58 -08002249 }
2250}
2251
// ClassVisitor that accumulates every visited class into a vector of raw
// ObjPtrs. Only used on the !kMovingClasses path (see
// VisitClassesWithoutClassesLock), where classes cannot move while visiting.
class GetClassesInToVector : public ClassVisitor {
 public:
  bool operator()(ObjPtr<mirror::Class> klass) override {
    classes_.push_back(klass);
    return true;  // Always continue visiting.
  }
  // Accumulated classes; read directly by the caller.
  std::vector<ObjPtr<mirror::Class>> classes_;
};
2260
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002261class GetClassInToObjectArray : public ClassVisitor {
2262 public:
2263 explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2264 : arr_(arr), index_(0) {}
2265
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002266 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002267 ++index_;
2268 if (index_ <= arr_->GetLength()) {
2269 arr_->Set(index_ - 1, klass);
2270 return true;
2271 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002272 return false;
2273 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002274
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002275 bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002276 return index_ <= arr_->GetLength();
2277 }
2278
2279 private:
2280 mirror::ObjectArray<mirror::Class>* const arr_;
2281 int32_t index_;
2282};
2283
// Visits all loaded classes without holding the classes lock during the
// callback, by first snapshotting the classes into secondary storage
// (a plain vector when classes cannot move, a handle-held ObjectArray when
// they can).
void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
  // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
  // is avoiding duplicates.
  if (!kMovingClasses) {
    // Classes cannot move: raw ObjPtrs stay valid as long as we don't suspend.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    GetClassesInToVector accumulator;
    VisitClasses(&accumulator);
    for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
      if (!visitor->operator()(klass)) {
        return;  // Visitor requested early stop.
      }
    }
  } else {
    // Moving GC: store the snapshot in a GC-visible ObjectArray via a Handle.
    Thread* const self = Thread::Current();
    StackHandleScope<1> hs(self);
    auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
    // We size the array assuming classes won't be added to the class table during the visit.
    // If this assumption fails we iterate again.
    while (true) {
      size_t class_table_size;
      {
        ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
        // Add 100 in case new classes get loaded when we are filling in the object array.
        class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
      }
      ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
      classes.Assign(
          mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
      CHECK(classes != nullptr);  // OOME.
      GetClassInToObjectArray accumulator(classes.Get());
      VisitClasses(&accumulator);
      if (accumulator.Succeeded()) {
        break;  // Everything fit; snapshot complete.
      }
    }
    for (int32_t i = 0; i < classes->GetLength(); ++i) {
      // If the class table shrank during creation of the classes array we expect null elements. If
      // the class table grew then the loop repeats. If classes are created after the loop has
      // finished then we don't visit.
      ObjPtr<mirror::Class> klass = classes->Get(i);
      if (klass != nullptr && !visitor->operator()(klass)) {
        return;
      }
    }
  }
}
2330
// Tears down per-class-loader state and frees any still-queued
// visibly-initialized callbacks.
ClassLinker::~ClassLinker() {
  Thread* const self = Thread::Current();
  for (const ClassLoaderData& data : class_loaders_) {
    // CHA unloading analysis is not needed. No negative consequences are expected because
    // all the classloaders are deleted at the same time.
    DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
  }
  class_loaders_.clear();
  // The callbacks are elements of an intrusive forward_list; take ownership of
  // each node with a unique_ptr, unlink it, and let the unique_ptr free it.
  while (!running_visibly_initialized_callbacks_.empty()) {
    std::unique_ptr<VisiblyInitializedCallback> callback(
        std::addressof(running_visibly_initialized_callbacks_.front()));
    running_visibly_initialized_callbacks_.pop_front();
  }
}
2345
// Releases all native state owned by one unloaded class loader: its weak
// global root, JIT code / CHA dependencies for methods in its LinearAlloc,
// stale critical-native clinit-check entries, and finally the allocator and
// class table themselves.
void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
  Runtime* const runtime = Runtime::Current();
  JavaVMExt* const vm = runtime->GetJavaVM();
  vm->DeleteWeakGlobalRef(self, data.weak_root);
  // Notify the JIT that we need to remove the methods and/or profiling info.
  if (runtime->GetJit() != nullptr) {
    jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
    if (code_cache != nullptr) {
      // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
      code_cache->RemoveMethodsIn(self, *data.allocator);
    }
  } else if (cha_ != nullptr) {
    // If we don't have a JIT, we need to manually remove the CHA dependencies manually.
    cha_->RemoveDependenciesForLinearAlloc(data.allocator);
  }
  // Cleanup references to single implementation ArtMethods that will be deleted.
  if (cleanup_cha) {
    CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
    data.class_table->Visit<kWithoutReadBarrier>(visitor);
  }
  {
    // Drop entries whose ArtMethod lives in the LinearAlloc we are about to delete.
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto end = critical_native_code_with_clinit_check_.end();
    for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
      if (data.allocator->ContainsUnsafe(it->first)) {
        it = critical_native_code_with_clinit_check_.erase(it);
      } else {
        ++it;
      }
    }
  }

  delete data.allocator;
  delete data.class_table;
}
2381
Vladimir Markobcf17522018-06-01 13:14:32 +01002382ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2383 return ObjPtr<mirror::PointerArray>::DownCast(
Andreas Gampe542451c2016-07-26 09:02:02 -07002384 image_pointer_size_ == PointerSize::k64
Vladimir Markobcf17522018-06-01 13:14:32 +01002385 ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2386 : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
Mathieu Chartiere401d142015-04-22 13:56:20 -07002387}
2388
// Allocates a bare (uninitialized) DexCache object and sets its location
// string. Returns null with a pending OOME on allocation failure.
ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
  StackHandleScope<1> hs(self);
  // Hold the new DexCache in a Handle across the interning call below, which
  // can allocate (and thus trigger GC).
  auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
      GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
  if (dex_cache == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Use InternWeak() so that the location String can be collected when the ClassLoader
  // with this DexCache is collected.
  ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
  if (location == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  dex_cache->SetLocation(location);
  return dex_cache.Get();
}
2407
// Allocates a DexCache for `dex_file` and, on success, initializes it with
// the dex file and class loader under the dex lock. Returns null with a
// pending OOME on allocation failure.
ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
    Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
  StackHandleScope<1> hs(self);
  // Keep the class loader in a Handle: AllocDexCache allocates and may
  // trigger GC, which could move the loader object.
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
  if (dex_cache != nullptr) {
    WriterMutexLock mu(self, *Locks::dex_lock_);
    dex_cache->Initialize(&dex_file, h_class_loader.Get());
  }
  return dex_cache;
}
2419
Vladimir Marko70e2a762019-07-12 16:49:00 +01002420template <bool kMovable, typename PreFenceVisitor>
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002421ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2422 ObjPtr<mirror::Class> java_lang_Class,
Vladimir Marko70e2a762019-07-12 16:49:00 +01002423 uint32_t class_size,
2424 const PreFenceVisitor& pre_fence_visitor) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08002425 DCHECK_GE(class_size, sizeof(mirror::Class));
Ian Rogers1d54e732013-05-02 21:10:01 -07002426 gc::Heap* heap = Runtime::Current()->GetHeap();
Roland Levillain0e840272018-08-23 19:55:30 +01002427 ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
Vladimir Marko70e2a762019-07-12 16:49:00 +01002428 heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2429 heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
Ian Rogers6fac4472014-02-25 17:01:10 -08002430 if (UNLIKELY(k == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002431 self->AssertPendingOOMException();
Ian Rogers6fac4472014-02-25 17:01:10 -08002432 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07002433 }
Ian Rogers6fac4472014-02-25 17:01:10 -08002434 return k->AsClass();
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07002435}
2436
Vladimir Marko70e2a762019-07-12 16:49:00 +01002437template <bool kMovable>
2438ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2439 ObjPtr<mirror::Class> java_lang_Class,
2440 uint32_t class_size) {
2441 mirror::Class::InitializeClassVisitor visitor(class_size);
2442 return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
2443}
2444
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002445ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002446 return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
Brian Carlstroma0808032011-07-18 00:39:23 -07002447}
2448
// Allocates the class object for a primitive array type (e.g. int[]) and
// registers it as the class root `array_root`, with its component type taken
// from the class root `primitive_root`.
void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
                                           ClassRoot primitive_root,
                                           ClassRoot array_root) {
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
      self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
  ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
  DCHECK(component_type->IsPrimitive());
  array_class->SetComponentType(component_type);
  SetClassRoot(array_root, array_class);
}
2465
// Completes the setup of an array class: superclass, vtable/imt (shared with
// java.lang.Object), flags, iftable, and access flags, then marks the class
// visibly initialized.
void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
  ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
  array_class->SetSuperClass(java_lang_Object);
  // Arrays declare no methods of their own; they share Object's vtable.
  array_class->SetVTable(java_lang_Object->GetVTable());
  array_class->SetPrimitiveType(Primitive::kPrimNot);
  ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
  array_class->SetClassFlags(component_type->IsPrimitive()
                                 ? mirror::kClassFlagNoReferenceFields
                                 : mirror::kClassFlagObjectArray);
  array_class->SetClassLoader(component_type->GetClassLoader());
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
  array_class->PopulateEmbeddedVTable(image_pointer_size_);
  ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
  array_class->SetImt(object_imt, image_pointer_size_);
  DCHECK_EQ(array_class->NumMethods(), 0u);

  // don't need to set new_class->SetObjectSize(..)
  // because Object::SizeOf delegates to Array::SizeOf

  // All arrays have java/lang/Cloneable and java/io/Serializable as
  // interfaces. We need to set that up here, so that stuff like
  // "instanceof" works right.

  // Use the single, global copies of "interfaces" and "iftable"
  // (remember not to free them for arrays).
  {
    ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
    CHECK(array_iftable != nullptr);
    array_class->SetIfTable(array_iftable);
  }

  // Inherit access flags from the component type.
  int access_flags = component_type->GetAccessFlags();
  // Lose any implementation detail flags; in particular, arrays aren't finalizable.
  access_flags &= kAccJavaFlagsMask;
  // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
  // and remove "interface".
  access_flags |= kAccAbstract | kAccFinal;
  access_flags &= ~kAccInterface;

  array_class->SetAccessFlagsDuringLinking(access_flags);

  // Array classes are fully initialized either during single threaded startup,
  // or from a pre-fence visitor, so visibly initialized.
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
}
2512
2513void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
2514 // Do not hold lock on the array class object, the initialization of
2515 // core array classes is done while the process is still single threaded.
2516 ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
2517 FinishArrayClassSetup(array_class);
2518
2519 std::string temp;
2520 const char* descriptor = array_class->GetDescriptor(&temp);
2521 size_t hash = ComputeModifiedUtf8Hash(descriptor);
2522 ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
2523 CHECK(existing == nullptr);
Roland Levillain0e840272018-08-23 19:55:30 +01002524}
2525
Vladimir Markobcf17522018-06-01 13:14:32 +01002526ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07002527 Thread* self,
2528 size_t length) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07002529 return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002530 self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
Shih-wei Liao55df06b2011-08-26 14:39:27 -07002531}
2532
// Waits for `klass` to become resolved and returns it, or returns null with a pending
// exception if the class is (or becomes) erroneous. Handles two waiting scenarios:
// temporary classes that must first be retired and replaced in the class table, and
// classes still being linked by another thread.
ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
                                                  const char* descriptor,
                                                  ObjPtr<mirror::Class> klass) {
  DCHECK(klass != nullptr);
  if (kIsDebugBuild) {
    // Wrap `klass` in a handle so the debug poisoning below can't invalidate it.
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
    Thread::PoisonObjectPointersIfDebug();
  }

  // For temporary classes we must wait for them to be retired.
  if (init_done_ && klass->IsTemp()) {
    CHECK(!klass->IsResolved());
    if (klass->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(klass);
      return nullptr;
    }
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    ObjectLock<mirror::Class> lock(self, h_class);
    // Loop and wait for the resolving thread to retire this class.
    while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
      lock.WaitIgnoringInterrupts();
    }
    if (h_class->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(h_class.Get());
      return nullptr;
    }
    CHECK(h_class->IsRetired());
    // Get the updated class from class table.
    klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
  }

  // Wait for the class if it has not already been linked.
  size_t index = 0;
  // Maximum number of yield iterations until we start sleeping.
  static const size_t kNumYieldIterations = 1000;
  // How long each sleep is in us.
  static const size_t kSleepDurationUS = 1000;  // 1 ms.
  while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
    {
      ObjectTryLock<mirror::Class> lock(self, h_class);
      // Can not use a monitor wait here since it may block when returning and deadlock if another
      // thread has locked klass.
      if (lock.Acquired()) {
        // Check for circular dependencies between classes, the lock is required for SetStatus.
        if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
          // This thread is already resolving this class higher up the stack.
          ThrowClassCircularityError(h_class.Get());
          mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
          return nullptr;
        }
      }
    }
    {
      // Handle wrapper deals with klass moving.
      // Busy-wait politely: yield for the first kNumYieldIterations rounds, then sleep.
      ScopedThreadSuspension sts(self, ThreadState::kSuspended);
      if (index < kNumYieldIterations) {
        sched_yield();
      } else {
        usleep(kSleepDurationUS);
      }
    }
    ++index;
  }

  if (klass->IsErroneousUnresolved()) {
    ThrowEarlierClassFailure(klass);
    return nullptr;
  }
  // Return the loaded class. No exceptions should be pending.
  CHECK(klass->IsResolved()) << klass->PrettyClass();
  self->AssertNoPendingException();
  return klass;
}
2609
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002610using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
Ian Rogers68b56852014-08-29 20:19:11 -07002611
2612// Search a collection of DexFiles for a descriptor
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002613ClassPathEntry FindInClassPath(const char* descriptor,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07002614 size_t hash, const std::vector<const DexFile*>& class_path) {
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002615 for (const DexFile* dex_file : class_path) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08002616 DCHECK(dex_file != nullptr);
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002617 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07002618 if (dex_class_def != nullptr) {
Ian Rogers68b56852014-08-29 20:19:11 -07002619 return ClassPathEntry(dex_file, dex_class_def);
2620 }
2621 }
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002622 return ClassPathEntry(nullptr, nullptr);
Ian Rogers68b56852014-08-29 20:19:11 -07002623}
2624
// Helper macro to make sure each class loader lookup call handles the case the
// class loader is not recognized, or the lookup threw an exception.
//
// Note: this must be a macro (not a function) because it returns from the
// *enclosing* function: `false` when the loader chain is unrecognized or a
// non-ClassNotFound error is pending, `true` when the class was found.
// Falling through the macro means "not found yet, keep searching".
#define RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(call_, result_, thread_) \
do {                                                                          \
  auto local_call = call_;                                                    \
  if (!local_call) {                                                          \
    return false;                                                             \
  }                                                                           \
  auto local_result = result_;                                                \
  if (local_result != nullptr) {                                              \
    return true;                                                              \
  }                                                                           \
  auto local_thread = thread_;                                                \
  if (local_thread->IsExceptionPending()) {                                   \
    /* Pending exception means there was an error other than */               \
    /* ClassNotFound that must be returned to the caller. */                  \
    return false;                                                             \
  }                                                                           \
} while (0)
2644
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002645bool ClassLinker::FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable& soa,
2646 Thread* self,
2647 const char* descriptor,
2648 size_t hash,
2649 Handle<mirror::ClassLoader> class_loader,
2650 /*out*/ ObjPtr<mirror::Class>* result) {
2651 ArtField* field =
2652 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
Brad Stenning9c924e82021-10-11 19:09:00 -07002653 return FindClassInSharedLibrariesHelper(soa, self, descriptor, hash, class_loader, field, result);
2654}
2655
// Shared implementation for FindClassInSharedLibraries / FindClassInSharedLibrariesAfter:
// reads the loader array out of `field` on `class_loader` and recursively searches each
// loader in order. Returns false if a loader in the array was unrecognized or a real
// error is pending; true otherwise, with `*result` set if the class was found.
bool ClassLinker::FindClassInSharedLibrariesHelper(ScopedObjectAccessAlreadyRunnable& soa,
                                                   Thread* self,
                                                   const char* descriptor,
                                                   size_t hash,
                                                   Handle<mirror::ClassLoader> class_loader,
                                                   ArtField* field,
                                                   /*out*/ ObjPtr<mirror::Class>* result) {
  ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
  if (raw_shared_libraries == nullptr) {
    // No shared libraries configured; nothing to search, chain still recognized.
    return true;
  }

  // Handles keep the array and the current loader valid across any GC-induced moves
  // caused by the recursive lookup below.
  StackHandleScope<2> hs(self);
  Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
      hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
  MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
  for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
    // Reuse one mutable handle rather than creating a scope per iteration.
    temp_loader.Assign(loader);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, temp_loader, result),
        *result,
        self);
  }
  return true;
}
2681
Brad Stenning9c924e82021-10-11 19:09:00 -07002682bool ClassLinker::FindClassInSharedLibrariesAfter(ScopedObjectAccessAlreadyRunnable& soa,
2683 Thread* self,
2684 const char* descriptor,
2685 size_t hash,
2686 Handle<mirror::ClassLoader> class_loader,
2687 /*out*/ ObjPtr<mirror::Class>* result) {
2688 ArtField* field = jni::DecodeArtField(
2689 WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter);
2690 return FindClassInSharedLibrariesHelper(soa, self, descriptor, hash, class_loader, field, result);
2691}
2692
// Recursively searches a recognized class loader chain (boot, Path/Dex/InMemoryDex,
// or DelegateLast loaders) for `descriptor`. Returns false if any loader in the chain
// is unrecognized (caller must fall back to calling loadClass through Java); returns
// true when the chain was fully recognized, with `*result` set if the class was found.
// The RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION macro returns early on a find,
// an unrecognized loader, or a pending non-ClassNotFound exception.
bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
                                                Thread* self,
                                                const char* descriptor,
                                                size_t hash,
                                                Handle<mirror::ClassLoader> class_loader,
                                                /*out*/ ObjPtr<mirror::Class>* result) {
  // Termination case: boot class loader.
  if (IsBootClassLoader(soa, class_loader.Get())) {
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    return true;
  }

  if (IsPathOrDexClassLoader(soa, class_loader) || IsInMemoryDexClassLoader(soa, class_loader)) {
    // For regular path or dex class loader the search order is:
    //    - parent
    //    - shared libraries
    //    - class loader dex files

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  if (IsDelegateLastClassLoader(soa, class_loader)) {
    // For delegate last, the search order is:
    //    - boot class path
    //    - shared libraries
    //    - class loader dex files
    //    - parent
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(soa, self, descriptor, hash, class_loader, result),
        *result,
        self);

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  // Unsupported class loader.
  *result = nullptr;
  return false;
}
2773
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002774#undef RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION
2775
Andreas Gampe501c3b02019-04-17 21:54:27 +00002776namespace {
2777
2778// Matches exceptions caught in DexFile.defineClass.
2779ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
2780 ClassLinker* class_linker)
2781 REQUIRES_SHARED(Locks::mutator_lock_) {
2782 return
2783 // ClassNotFoundException.
2784 throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
2785 class_linker))
2786 ||
2787 // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
2788 throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
2789}
2790
2791// Clear exceptions caught in DexFile.defineClass.
2792ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
2793 REQUIRES_SHARED(Locks::mutator_lock_) {
2794 if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
2795 self->ClearException();
2796 }
2797}
2798
2799} // namespace
2800
// Finds the class in the boot class loader.
// If the class is found the method returns the resolved class. Otherwise it returns null.
// Always returns true: the boot class loader is always a recognized lookup. On a failed
// definition, ClassNotFound-style exceptions are cleared (the caller rethrows with a
// proper stack trace); other exceptions are left pending.
bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
                                                      const char* descriptor,
                                                      size_t hash,
                                                      /*out*/ ObjPtr<mirror::Class>* result) {
  ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
  if (pair.second != nullptr) {
    // The boot class path has a definition; either resolve the already-loaded class
    // or define it now.
    ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
    if (klass != nullptr) {
      *result = EnsureResolved(self, descriptor, klass);
    } else {
      *result = DefineClass(self,
                            descriptor,
                            hash,
                            ScopedNullHandle<mirror::ClassLoader>(),
                            *pair.first,
                            *pair.second);
    }
    if (*result == nullptr) {
      CHECK(self->IsExceptionPending()) << descriptor;
      // Swallow only the exceptions DexFile.defineClass would have caught.
      FilterDexFileCaughtExceptions(self, this);
    }
  }
  // The boot classloader is always a known lookup.
  return true;
}
Andreas Gampef865ea92015-04-13 22:14:19 -07002828
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002829bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
Calin Juravle415dc3d2017-06-28 11:03:12 -07002830 ScopedObjectAccessAlreadyRunnable& soa,
2831 const char* descriptor,
2832 size_t hash,
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002833 Handle<mirror::ClassLoader> class_loader,
2834 /*out*/ ObjPtr<mirror::Class>* result) {
David Brazdil05909d82018-12-06 16:25:16 +00002835 DCHECK(IsPathOrDexClassLoader(soa, class_loader) ||
2836 IsInMemoryDexClassLoader(soa, class_loader) ||
2837 IsDelegateLastClassLoader(soa, class_loader))
Calin Juravle415dc3d2017-06-28 11:03:12 -07002838 << "Unexpected class loader for descriptor " << descriptor;
Andreas Gampef865ea92015-04-13 22:14:19 -07002839
Vladimir Marko68c07582021-04-19 16:01:15 +00002840 const DexFile* dex_file = nullptr;
2841 const dex::ClassDef* class_def = nullptr;
Andreas Gampeb8e7c372018-02-20 18:24:55 -08002842 ObjPtr<mirror::Class> ret;
Vladimir Marko68c07582021-04-19 16:01:15 +00002843 auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
2844 const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
2845 if (cp_class_def != nullptr) {
2846 dex_file = cp_dex_file;
2847 class_def = cp_class_def;
2848 return false; // Found a class definition, stop visit.
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002849 }
Andreas Gampeb8e7c372018-02-20 18:24:55 -08002850 return true; // Continue with the next DexFile.
2851 };
Vladimir Marko68c07582021-04-19 16:01:15 +00002852 VisitClassLoaderDexFiles(soa, class_loader, find_class_def);
Andreas Gampeb8e7c372018-02-20 18:24:55 -08002853
Vladimir Marko68c07582021-04-19 16:01:15 +00002854 if (class_def != nullptr) {
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002855 *result = DefineClass(soa.Self(), descriptor, hash, class_loader, *dex_file, *class_def);
2856 if (UNLIKELY(*result == nullptr)) {
Vladimir Marko68c07582021-04-19 16:01:15 +00002857 CHECK(soa.Self()->IsExceptionPending()) << descriptor;
2858 FilterDexFileCaughtExceptions(soa.Self(), this);
2859 } else {
2860 DCHECK(!soa.Self()->IsExceptionPending());
2861 }
2862 }
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002863 // A BaseDexClassLoader is always a known lookup.
2864 return true;
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002865}
2866
// Top-level class lookup: finds (or loads and defines) the class named by `descriptor`
// in `class_loader`. Fast paths: primitive classes, classes already in the class table,
// and boot class path classes. Array classes are created directly; otherwise the
// recognized class loader chain is searched natively, falling back to calling
// ClassLoader.loadClass through JNI for unrecognized loaders. Returns the class, or
// null with a pending exception on failure.
ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
                                             const char* descriptor,
                                             Handle<mirror::ClassLoader> class_loader) {
  DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
  DCHECK(self != nullptr);
  self->AssertNoPendingException();
  self->PoisonObjectPointers();  // For DefineClass, CreateArrayClass, etc...
  if (descriptor[1] == '\0') {
    // only the descriptors of primitive types should be 1 character long, also avoid class lookup
    // for primitive classes that aren't backed by dex files.
    return FindPrimitiveClass(descriptor[0]);
  }
  const size_t hash = ComputeModifiedUtf8Hash(descriptor);
  // Find the class in the loaded classes table.
  ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
  if (klass != nullptr) {
    return EnsureResolved(self, descriptor, klass);
  }
  // Class is not yet loaded.
  if (descriptor[0] != '[' && class_loader == nullptr) {
    // Non-array class and the boot class loader, search the boot class path.
    ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
    if (pair.second != nullptr) {
      return DefineClass(self,
                         descriptor,
                         hash,
                         ScopedNullHandle<mirror::ClassLoader>(),
                         *pair.first,
                         *pair.second);
    } else {
      // The boot class loader is searched ahead of the application class loader, failures are
      // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
      // trigger the chaining with a proper stack trace.
      ObjPtr<mirror::Throwable> pre_allocated =
          Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
      self->SetException(pre_allocated);
      return nullptr;
    }
  }
  // `descriptor_equals` records whether the class we end up with actually has the
  // requested descriptor; a mismatch from a misbehaving loader is diagnosed below.
  ObjPtr<mirror::Class> result_ptr;
  bool descriptor_equals;
  if (descriptor[0] == '[') {
    result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
    DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
    DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
    descriptor_equals = true;
  } else {
    ScopedObjectAccessUnchecked soa(self);
    bool known_hierarchy =
        FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &result_ptr);
    if (result_ptr != nullptr) {
      // The chain was understood and we found the class. We still need to add the class to
      // the class table to protect from racy programs that can try and redefine the path list
      // which would change the Class<?> returned for subsequent evaluation of const-class.
      DCHECK(known_hierarchy);
      DCHECK(result_ptr->DescriptorEquals(descriptor));
      descriptor_equals = true;
    } else if (!self->IsExceptionPending()) {
      // Either the chain wasn't understood or the class wasn't found.
      // If there is a pending exception we didn't clear, it is a not a ClassNotFoundException and
      // we should return it instead of silently clearing and retrying.
      //
      // If the chain was understood but we did not find the class, let the Java-side
      // rediscover all this and throw the exception with the right stack trace. Note that
      // the Java-side could still succeed for racy programs if another thread is actively
      // modifying the class loader's path list.

      // The runtime is not allowed to call into java from a runtime-thread so just abort.
      if (self->IsRuntimeThread()) {
        // Oops, we can't call into java so we can't run actual class-loader code.
        // This is true for e.g. for the compiler (jit or aot).
        ObjPtr<mirror::Throwable> pre_allocated =
            Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
        self->SetException(pre_allocated);
        return nullptr;
      }

      // Inlined DescriptorToDot(descriptor) with extra validation.
      //
      // Throw NoClassDefFoundError early rather than potentially load a class only to fail
      // the DescriptorEquals() check below and give a confusing error message. For example,
      // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
      // instead of "Ljava/lang/String;", the message below using the "dot" names would be
      // "class loader [...] returned class java.lang.String instead of java.lang.String".
      size_t descriptor_length = strlen(descriptor);
      if (UNLIKELY(descriptor[0] != 'L') ||
          UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
          UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
        ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
        return nullptr;
      }

      std::string class_name_string(descriptor + 1, descriptor_length - 2);
      std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
      if (known_hierarchy &&
          fast_class_not_found_exceptions_ &&
          !Runtime::Current()->IsJavaDebuggable()) {
        // For known hierarchy, we know that the class is going to throw an exception. If we aren't
        // debuggable, optimize this path by throwing directly here without going back to Java
        // language. This reduces how many ClassNotFoundExceptions happen.
        self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
                                 "%s",
                                 class_name_string.c_str());
      } else {
        // Call ClassLoader.loadClass() through JNI for unrecognized (or racy) loaders.
        ScopedLocalRef<jobject> class_loader_object(
            soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
        ScopedLocalRef<jobject> result(soa.Env(), nullptr);
        {
          ScopedThreadStateChange tsc(self, ThreadState::kNative);
          ScopedLocalRef<jobject> class_name_object(
              soa.Env(), soa.Env()->NewStringUTF(class_name_string.c_str()));
          if (class_name_object.get() == nullptr) {
            DCHECK(self->IsExceptionPending());  // OOME.
            return nullptr;
          }
          CHECK(class_loader_object.get() != nullptr);
          result.reset(soa.Env()->CallObjectMethod(class_loader_object.get(),
                                                   WellKnownClasses::java_lang_ClassLoader_loadClass,
                                                   class_name_object.get()));
        }
        if (result.get() == nullptr && !self->IsExceptionPending()) {
          // broken loader - throw NPE to be compatible with Dalvik
          ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
                                                 class_name_string.c_str()).c_str());
          return nullptr;
        }
        result_ptr = soa.Decode<mirror::Class>(result.get());
        // Check the name of the returned class.
        descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
      }
    } else {
      DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
    }
  }

  if (self->IsExceptionPending()) {
    // If the ClassLoader threw or array class allocation failed, pass that exception up.
    // However, to comply with the RI behavior, first check if another thread succeeded.
    result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
    if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
      self->ClearException();
      return EnsureResolved(self, descriptor, result_ptr);
    }
    return nullptr;
  }

  // Try to insert the class to the class table, checking for mismatch.
  ObjPtr<mirror::Class> old;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
    old = class_table->Lookup(descriptor, hash);
    if (old == nullptr) {
      old = result_ptr;  // For the comparison below, after releasing the lock.
      if (descriptor_equals) {
        class_table->InsertWithHash(result_ptr, hash);
        WriteBarrier::ForEveryFieldWrite(class_loader.Get());
      }  // else throw below, after releasing the lock.
    }
  }
  if (UNLIKELY(old != result_ptr)) {
    // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
    // capable class loaders. (All class loaders are considered parallel capable on Android.)
    ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
    const char* loader_class_name =
        loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
    LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
                 << " is not well-behaved; it returned a different Class for racing loadClass(\""
                 << DescriptorToDot(descriptor) << "\").";
    return EnsureResolved(self, descriptor, old);
  }
  if (UNLIKELY(!descriptor_equals)) {
    std::string result_storage;
    const char* result_name = result_ptr->GetDescriptor(&result_storage);
    std::string loader_storage;
    const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
    ThrowNoClassDefFoundError(
        "Initiating class loader of type %s returned class %s instead of %s.",
        DescriptorToDot(loader_class_name).c_str(),
        DescriptorToDot(result_name).c_str(),
        DescriptorToDot(descriptor).c_str());
    return nullptr;
  }
  // Success.
  return result_ptr;
}
3053
Alex Light270db1c2019-12-03 12:20:01 +00003054// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
3055// define-class and how many recursive DefineClasses we are at in order to allow for doing things
3056// like pausing class definition.
3057struct ScopedDefiningClass {
3058 public:
3059 explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
3060 : self_(self), returned_(false) {
3061 Locks::mutator_lock_->AssertSharedHeld(self_);
3062 Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
3063 self_->IncrDefineClassCount();
3064 }
3065 ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
3066 Locks::mutator_lock_->AssertSharedHeld(self_);
3067 CHECK(returned_);
3068 }
3069
3070 ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
3071 REQUIRES_SHARED(Locks::mutator_lock_) {
3072 CHECK(!returned_);
3073 self_->DecrDefineClassCount();
3074 Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
3075 Thread::PoisonObjectPointersIfDebug();
3076 returned_ = true;
3077 return h_klass.Get();
3078 }
3079
3080 ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
3081 REQUIRES_SHARED(Locks::mutator_lock_) {
3082 StackHandleScope<1> hs(self_);
3083 Handle<mirror::Class> h_klass(hs.NewHandle(klass));
3084 return Finish(h_klass);
3085 }
3086
3087 ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
3088 REQUIRES_SHARED(Locks::mutator_lock_) {
3089 ScopedNullHandle<mirror::Class> snh;
3090 return Finish(snh);
3091 }
3092
3093 private:
3094 Thread* self_;
3095 bool returned_;
3096};
3097
// Creates, loads, and links a class from `dex_class_def` in `dex_file` on
// behalf of `class_loader`. `hash` is the hash of `descriptor` used for
// class-table insertion (see InsertClass below). Returns the resolved class;
// if another thread raced us and won, returns that thread's class instead.
// Returns nullptr with an exception pending on failure.
ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
                                               const char* descriptor,
                                               size_t hash,
                                               Handle<mirror::ClassLoader> class_loader,
                                               const DexFile& dex_file,
                                               const dex::ClassDef& dex_class_def) {
  // RAII bookkeeping: notifies runtime callbacks and tracks the nested
  // DefineClass depth; every return below must go through sdc.Finish().
  ScopedDefiningClass sdc(self);
  StackHandleScope<3> hs(self);
  metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
  auto klass = hs.NewHandle<mirror::Class>(nullptr);

  // Load the class from the dex file.
  if (UNLIKELY(!init_done_)) {
    // finish up init of hand crafted class_roots_
    if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
      klass.Assign(GetClassRoot<mirror::Object>(this));
    } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
      klass.Assign(GetClassRoot<mirror::Class>(this));
    } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass.Assign(GetClassRoot<mirror::String>(this));
    } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
      klass.Assign(GetClassRoot<mirror::Reference>(this));
    } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
      klass.Assign(GetClassRoot<mirror::DexCache>(this));
    } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
      klass.Assign(GetClassRoot<mirror::ClassExt>(this));
    }
  }

  // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
  // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
  // public class path then we prevent the definition of the class.
  //
  // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
  // classpath is not checked.
  if (class_loader == nullptr &&
      Runtime::Current()->IsAotCompiler() &&
      DenyAccessBasedOnPublicSdk(descriptor)) {
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  if (klass == nullptr) {
    // Allocate a class with the status of not ready.
    // Interface object should get the right size here. Regular class will
    // figure out the right size later and be replaced with one of the right
    // size when the class becomes resolved.
    if (CanAllocClass()) {
      klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
    } else {
      return sdc.Finish(nullptr);
    }
  }
  if (UNLIKELY(klass == nullptr)) {
    // AllocClass failed; it must have set an OOME.
    self->AssertPendingOOMException();
    return sdc.Finish(nullptr);
  }
  // Get the real dex file. This will return the input if there aren't any callbacks or they do
  // nothing.
  DexFile const* new_dex_file = nullptr;
  dex::ClassDef const* new_class_def = nullptr;
  // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
  // will only be called once.
  Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
                                                            klass,
                                                            class_loader,
                                                            dex_file,
                                                            dex_class_def,
                                                            &new_dex_file,
                                                            &new_class_def);
  // Check to see if an exception happened during runtime callbacks. Return if so.
  if (self->IsExceptionPending()) {
    return sdc.Finish(nullptr);
  }
  ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
  if (dex_cache == nullptr) {
    self->AssertPendingException();
    return sdc.Finish(nullptr);
  }
  klass->SetDexCache(dex_cache);
  // Initialize class/loader/status/dex-index fields before any table insert.
  SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());

  // Mark the string class by setting its access flag.
  if (UNLIKELY(!init_done_)) {
    if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass->SetStringClass();
    }
  }

  // Hold the class object's monitor for the rest of the definition so racing
  // threads block in EnsureResolved until we finish or mark it erroneous.
  ObjectLock<mirror::Class> lock(self, klass);
  klass->SetClinitThreadId(self->GetTid());
  // Make sure we have a valid empty iftable even if there are errors.
  klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());

  // Add the newly loaded class to the loaded classes table.
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
  if (existing != nullptr) {
    // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
    // this thread to block.
    return sdc.Finish(EnsureResolved(self, descriptor, existing));
  }

  // Load the fields and other things after we are inserted in the table. This is so that we don't
  // end up allocating unfree-able linear alloc resources and then lose the race condition. The
  // other reason is that the field roots are only visited from the class table. So we need to be
  // inserted before we allocate / fill in these fields.
  LoadClass(self, *new_dex_file, *new_class_def, klass);
  if (self->IsExceptionPending()) {
    VLOG(class_linker) << self->GetException()->Dump();
    // An exception occured during load, set status to erroneous while holding klass' lock in case
    // notification is necessary.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }

  // Finish loading (if necessary) by finding parents
  CHECK(!klass->IsLoaded());
  if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
    // Loading failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  CHECK(klass->IsLoaded());

  // At this point the class is loaded. Publish a ClassLoad event.
  // Note: this may be a temporary class. It is a listener's responsibility to handle this.
  Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);

  // Link the class (if necessary)
  CHECK(!klass->IsResolved());
  // TODO: Use fast jobjects?
  auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);

  // LinkClass may replace `klass` with a differently-sized copy; the final
  // class object is returned through `h_new_class`.
  MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
  if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
    // Linking failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  self->AssertNoPendingException();
  CHECK(h_new_class != nullptr) << descriptor;
  CHECK(h_new_class->IsResolved()) << descriptor << " " << h_new_class->GetStatus();

  // Instrumentation may have updated entrypoints for all methods of all
  // classes. However it could not update methods of this class while we
  // were loading it. Now the class is resolved, we can update entrypoints
  // as required by instrumentation.
  if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
    // We must be in the kRunnable state to prevent instrumentation from
    // suspending all threads to update entrypoints while we are doing it
    // for this class.
    DCHECK_EQ(self->GetState(), ThreadState::kRunnable);
    Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
  }

  /*
   * We send CLASS_PREPARE events to the debugger from here. The
   * definition of "preparation" is creating the static fields for a
   * class and initializing them to the standard default values, but not
   * executing any code (that comes later, during "initialization").
   *
   * We did the static preparation in LinkClass.
   *
   * The class has been prepared and resolved but possibly not yet verified
   * at this point.
   */
  Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);

  // Notify native debugger of the new class and its layout.
  jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());

  return sdc.Finish(h_new_class);
}
3290
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003291uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003292 const dex::ClassDef& dex_class_def) {
Brian Carlstrom4873d462011-08-21 15:23:39 -07003293 size_t num_ref = 0;
Fred Shih37f05ef2014-07-16 18:38:08 -07003294 size_t num_8 = 0;
3295 size_t num_16 = 0;
Brian Carlstrom4873d462011-08-21 15:23:39 -07003296 size_t num_32 = 0;
3297 size_t num_64 = 0;
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003298 ClassAccessor accessor(dex_file, dex_class_def);
3299 // We allow duplicate definitions of the same field in a class_data_item
3300 // but ignore the repeated indexes here, b/21868015.
3301 uint32_t last_field_idx = dex::kDexNoIndex;
3302 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3303 uint32_t field_idx = field.GetIndex();
3304 // Ordering enforced by DexFileVerifier.
3305 DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3306 if (UNLIKELY(field_idx == last_field_idx)) {
3307 continue;
3308 }
3309 last_field_idx = field_idx;
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003310 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003311 const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3312 char c = descriptor[0];
3313 switch (c) {
3314 case 'L':
3315 case '[':
3316 num_ref++;
3317 break;
3318 case 'J':
3319 case 'D':
3320 num_64++;
3321 break;
3322 case 'I':
3323 case 'F':
3324 num_32++;
3325 break;
3326 case 'S':
3327 case 'C':
3328 num_16++;
3329 break;
3330 case 'B':
3331 case 'Z':
3332 num_8++;
3333 break;
3334 default:
3335 LOG(FATAL) << "Unknown descriptor: " << c;
3336 UNREACHABLE();
Brian Carlstrom4873d462011-08-21 15:23:39 -07003337 }
3338 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003339 return mirror::Class::ComputeClassSize(false,
3340 0,
3341 num_8,
3342 num_16,
3343 num_32,
3344 num_64,
3345 num_ref,
Mathieu Chartiere401d142015-04-22 13:56:20 -07003346 image_pointer_size_);
Brian Carlstrom4873d462011-08-21 15:23:39 -07003347}
3348
// Re-links the entrypoints of `klass`'s static methods once the class is
// visibly initialized, replacing the resolution/clinit-check trampolines
// installed by LinkCode. Also publishes any @CriticalNative JNI entrypoints
// that were registered while the class was still initializing.
void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
  size_t num_direct_methods = klass->NumDirectMethods();
  if (num_direct_methods == 0) {
    return;  // No direct methods => no static methods.
  }
  if (UNLIKELY(klass->IsProxyClass())) {
    // Proxy classes have no code of their own to fix up.
    return;
  }
  PointerSize pointer_size = image_pointer_size_;
  if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
                  klass->GetDirectMethods(pointer_size).end(),
                  [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
    // Store registered @CriticalNative methods, if any, to JNI entrypoints.
    // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
    ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
    ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Walk the map entries that fall inside [first_method, last_method],
    // publishing each pending JNI entrypoint and removing the map entry.
    auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
    while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
      lb->first->SetEntryPointFromJni(lb->second);
      lb = critical_native_code_with_clinit_check_.erase(lb);
    }
  }
  Runtime* runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // We should not update entrypoints when running the transactional
    // interpreter.
    return;
  }

  instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
  // Link the code of methods skipped by LinkCode.
  for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
    ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
    if (!method->IsStatic()) {
      // Only update static methods.
      continue;
    }
    instrumentation->UpdateMethodsCode(method, instrumentation->GetCodeForInvoke(method));
  }
  // Ignore virtual methods on the iterator.
}
3393
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003394// Does anything needed to make sure that the compiler will not generate a direct invoke to this
3395// method. Should only be called on non-invokable methods.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +00003396inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3397 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07003398 DCHECK(method != nullptr);
3399 DCHECK(!method->IsInvokable());
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003400 method->SetEntryPointFromQuickCompiledCodePtrSize(
3401 class_linker->GetQuickToInterpreterBridgeTrampoline(),
3402 class_linker->GetImagePointerSize());
Alex Light9139e002015-10-09 15:59:48 -07003403}
3404
// Installs the initial entrypoints for `method` during class loading:
// compiled code from the oat file when available (via instrumentation),
// the invocation-error bridge for non-invokable methods, and the JNI dlsym
// lookup stub for native methods. No-op in the AOT compiler.
static void LinkCode(ClassLinker* class_linker,
                     ArtMethod* method,
                     const OatFile::OatClass* oat_class,
                     uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  Runtime* const runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // The following code only applies to a non-compiler runtime.
    return;
  }

  // Method shouldn't have already been linked.
  DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
  DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized());  // Actually ClassStatus::Idx.

  if (!method->IsInvokable()) {
    // Abstract/conflict/etc. methods get an entrypoint that throws on call.
    EnsureThrowsInvocationError(class_linker, method);
    return;
  }

  const void* quick_code = nullptr;
  if (oat_class != nullptr) {
    // Every kind of method should at least get an invoke stub from the oat_method.
    // non-abstract methods also get their code pointers.
    const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
    quick_code = oat_method.GetQuickCode();
  }
  // Let instrumentation pick the actual entrypoint (compiled code, interpreter,
  // or an instrumentation stub) based on `quick_code` and runtime state.
  runtime->GetInstrumentation()->InitializeMethodsCode(method, quick_code);

  if (method->IsNative()) {
    // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
    // as the extra processing for @CriticalNative is not needed yet.
    method->SetEntryPointFromJni(
        method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
  }
}
3441
// Performs the first stage of initializing a freshly allocated class object:
// sets its java.lang.Class pointer, access flags, class loader, and dex
// indices, and advances its status from kNotReady to kIdx. The dex cache must
// already be set (see DefineClass).
void ClassLinker::SetupClass(const DexFile& dex_file,
                             const dex::ClassDef& dex_class_def,
                             Handle<mirror::Class> klass,
                             ObjPtr<mirror::ClassLoader> class_loader) {
  CHECK(klass != nullptr);
  CHECK(klass->GetDexCache() != nullptr);
  CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
  // Validate that the class def has a descriptor; the value itself is unused here.
  const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
  CHECK(descriptor != nullptr);

  klass->SetClass(GetClassRoot<mirror::Class>(this));
  uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
  // Only Java-visible access flags may be set at this point.
  CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
  klass->SetAccessFlagsDuringLinking(access_flags);
  klass->SetClassLoader(class_loader);
  DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
  mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);

  klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
  klass->SetDexTypeIndex(dex_class_def.class_idx_);
}
Brian Carlstrom934486c2011-07-12 23:42:50 -07003463
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003464LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3465 LinearAlloc* allocator,
3466 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003467 if (length == 0) {
3468 return nullptr;
3469 }
Vladimir Markocf36d492015-08-12 19:27:26 +01003470 // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3471 static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3472 size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003473 void* array_storage = allocator->Alloc(self, storage_size);
Vladimir Markocf36d492015-08-12 19:27:26 +01003474 auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003475 CHECK(ret != nullptr);
3476 std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3477 return ret;
Mathieu Chartierc7853442015-03-27 14:35:38 -07003478}
3479
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003480LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3481 LinearAlloc* allocator,
3482 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003483 if (length == 0) {
3484 return nullptr;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003485 }
Vladimir Marko14632852015-08-17 12:07:23 +01003486 const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3487 const size_t method_size = ArtMethod::Size(image_pointer_size_);
Vladimir Markocf36d492015-08-12 19:27:26 +01003488 const size_t storage_size =
3489 LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003490 void* array_storage = allocator->Alloc(self, storage_size);
Vladimir Markocf36d492015-08-12 19:27:26 +01003491 auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003492 CHECK(ret != nullptr);
3493 for (size_t i = 0; i < length; ++i) {
Vladimir Markocf36d492015-08-12 19:27:26 +01003494 new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003495 }
3496 return ret;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003497}
3498
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003499LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003500 if (class_loader == nullptr) {
3501 return Runtime::Current()->GetLinearAlloc();
3502 }
3503 LinearAlloc* allocator = class_loader->GetAllocator();
3504 DCHECK(allocator != nullptr);
3505 return allocator;
3506}
3507
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003508LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003509 if (class_loader == nullptr) {
3510 return Runtime::Current()->GetLinearAlloc();
3511 }
3512 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3513 LinearAlloc* allocator = class_loader->GetAllocator();
3514 if (allocator == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08003515 RegisterClassLoader(class_loader);
3516 allocator = class_loader->GetAllocator();
3517 CHECK(allocator != nullptr);
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003518 }
3519 return allocator;
3520}
3521
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003522void ClassLinker::LoadClass(Thread* self,
3523 const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003524 const dex::ClassDef& dex_class_def,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003525 Handle<mirror::Class> klass) {
David Brazdil20c765f2018-10-27 21:45:15 +00003526 ClassAccessor accessor(dex_file,
3527 dex_class_def,
3528 /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003529 if (!accessor.HasClassData()) {
3530 return;
3531 }
3532 Runtime* const runtime = Runtime::Current();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003533 {
3534 // Note: We cannot have thread suspension until the field and method arrays are setup or else
3535 // Class::VisitFieldRoots may miss some fields or methods.
Mathieu Chartier268764d2016-09-13 12:09:38 -07003536 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003537 // Load static fields.
Vladimir Marko23682bf2015-06-24 14:28:03 +01003538 // We allow duplicate definitions of the same field in a class_data_item
3539 // but ignore the repeated indexes here, b/21868015.
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003540 LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003541 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
3542 allocator,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003543 accessor.NumStaticFields());
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003544 LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
3545 allocator,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003546 accessor.NumInstanceFields());
3547 size_t num_sfields = 0u;
Vladimir Marko23682bf2015-06-24 14:28:03 +01003548 size_t num_ifields = 0u;
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003549 uint32_t last_static_field_idx = 0u;
3550 uint32_t last_instance_field_idx = 0u;
Orion Hodsonc069a302017-01-18 09:23:12 +00003551
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003552 // Methods
3553 bool has_oat_class = false;
3554 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
3555 ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
3556 : OatFile::OatClass::Invalid();
3557 const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
3558 klass->SetMethodsPtr(
3559 AllocArtMethodArray(self, allocator, accessor.NumMethods()),
3560 accessor.NumDirectMethods(),
3561 accessor.NumVirtualMethods());
3562 size_t class_def_method_index = 0;
3563 uint32_t last_dex_method_index = dex::kDexNoIndex;
3564 size_t last_class_def_method_index = 0;
3565
Nicolas Geoffray58f916c2021-11-15 14:02:07 +00003566 uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003567 // Use the visitor since the ranged based loops are bit slower from seeking. Seeking to the
3568 // methods needs to decode all of the fields.
3569 accessor.VisitFieldsAndMethods([&](
3570 const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3571 uint32_t field_idx = field.GetIndex();
3572 DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier.
3573 if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
3574 LoadField(field, klass, &sfields->At(num_sfields));
3575 ++num_sfields;
3576 last_static_field_idx = field_idx;
3577 }
3578 }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3579 uint32_t field_idx = field.GetIndex();
3580 DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier.
3581 if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
3582 LoadField(field, klass, &ifields->At(num_ifields));
3583 ++num_ifields;
3584 last_instance_field_idx = field_idx;
3585 }
3586 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3587 ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
3588 image_pointer_size_);
Vladimir Marko05f1a5b2022-03-09 14:20:15 +00003589 LoadMethod(dex_file, method, klass.Get(), art_method);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003590 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3591 uint32_t it_method_index = method.GetIndex();
3592 if (last_dex_method_index == it_method_index) {
3593 // duplicate case
3594 art_method->SetMethodIndex(last_class_def_method_index);
3595 } else {
3596 art_method->SetMethodIndex(class_def_method_index);
3597 last_dex_method_index = it_method_index;
3598 last_class_def_method_index = class_def_method_index;
3599 }
Nicolas Geoffray58f916c2021-11-15 14:02:07 +00003600 art_method->ResetCounter(hotness_threshold);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003601 ++class_def_method_index;
3602 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3603 ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
3604 class_def_method_index - accessor.NumDirectMethods(),
3605 image_pointer_size_);
Nicolas Geoffray58f916c2021-11-15 14:02:07 +00003606 art_method->ResetCounter(hotness_threshold);
Vladimir Marko05f1a5b2022-03-09 14:20:15 +00003607 LoadMethod(dex_file, method, klass.Get(), art_method);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003608 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3609 ++class_def_method_index;
3610 });
3611
3612 if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
David Sehr709b0702016-10-13 09:12:37 -07003613 LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003614 << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
3615 << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
3616 << ")";
Vladimir Marko81819db2015-11-05 15:30:12 +00003617 // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
3618 if (sfields != nullptr) {
3619 sfields->SetSize(num_sfields);
3620 }
3621 if (ifields != nullptr) {
3622 ifields->SetSize(num_ifields);
3623 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07003624 }
Vladimir Marko81819db2015-11-05 15:30:12 +00003625 // Set the field arrays.
3626 klass->SetSFieldsPtr(sfields);
3627 DCHECK_EQ(klass->NumStaticFields(), num_sfields);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003628 klass->SetIFieldsPtr(ifields);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003629 DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
Ian Rogers0571d352011-11-03 19:51:38 -07003630 }
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07003631 // Ensure that the card is marked so that remembered sets pick up native roots.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07003632 WriteBarrier::ForEveryFieldWrite(klass.Get());
Mathieu Chartierf3f2a7a2015-04-14 15:43:10 -07003633 self->AllowThreadSuspension();
Brian Carlstrom934486c2011-07-12 23:42:50 -07003634}
3635
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003636void ClassLinker::LoadField(const ClassAccessor::Field& field,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003637 Handle<mirror::Class> klass,
Mathieu Chartierc7853442015-03-27 14:35:38 -07003638 ArtField* dst) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003639 const uint32_t field_idx = field.GetIndex();
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003640 dst->SetDexFieldIndex(field_idx);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003641 dst->SetDeclaringClass(klass.Get());
David Brazdilf6a8a552018-01-15 18:10:50 +00003642
David Brazdil85865692018-10-30 17:26:20 +00003643 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3644 dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
Brian Carlstrom934486c2011-07-12 23:42:50 -07003645}
3646
// Fill in the ArtMethod slot `dst` from the dex description `method`: dex method
// index, declaring class, access flags (with hiddenapi runtime flags plus
// runtime-internal flags such as the nterp fast-path bits), and the code item /
// data pointer appropriate for native, abstract or bytecode methods.
// The ScopedAssertNoThreadSuspension below enforces that no suspension happens
// while the raw `klass`/`dst` pointers are in use.
void ClassLinker::LoadMethod(const DexFile& dex_file,
                             const ClassAccessor::Method& method,
                             ObjPtr<mirror::Class> klass,
                             ArtMethod* dst) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  const uint32_t dex_method_idx = method.GetIndex();
  const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
  uint32_t name_utf16_length;
  const char* method_name = dex_file.StringDataAndUtf16LengthByIdx(method_id.name_idx_,
                                                                   &name_utf16_length);
  // Shorty: index 0 is the return type, indexes 1+ are the parameter types.
  std::string_view shorty = dex_file.GetShortyView(dex_file.GetProtoId(method_id.proto_idx_));

  dst->SetDexMethodIndex(dex_method_idx);
  dst->SetDeclaringClass(klass);

  // Get access flags from the DexFile and set hiddenapi runtime access flags.
  uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);

  // Compare `method_name` against an ASCII literal of known length. Matching the
  // UTF-16 length and finding a NUL at byte offset `length` implies the dex string
  // is plain ASCII, so a byte-wise memcmp is sufficient.
  auto has_ascii_name = [method_name, name_utf16_length](const char* ascii_name,
                                                         size_t length) ALWAYS_INLINE {
    DCHECK_EQ(strlen(ascii_name), length);
    return length == name_utf16_length &&
           method_name[length] == 0 &&  // Is `method_name` an ASCII string?
           memcmp(ascii_name, method_name, length) == 0;
  };
  if (UNLIKELY(has_ascii_name("finalize", sizeof("finalize") - 1u))) {
    // Set finalizable flag on declaring class.
    if (shorty == "V") {
      // Void return type.
      if (klass->GetClassLoader() != nullptr) {  // All non-boot finalizer methods are flagged.
        klass->SetFinalizable();
      } else {
        std::string_view klass_descriptor =
            dex_file.GetTypeDescriptorView(dex_file.GetTypeId(klass->GetDexTypeIndex()));
        // The Enum class declares a "final" finalize() method to prevent subclasses from
        // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
        // subclasses, so we exclude it here.
        // We also want to avoid setting the flag on Object, where we know that finalize() is
        // empty.
        if (klass_descriptor != "Ljava/lang/Object;" &&
            klass_descriptor != "Ljava/lang/Enum;") {
          klass->SetFinalizable();
        }
      }
    }
  } else if (method_name[0] == '<') {
    // Fix broken access flags for initializers. Bug 11157540.
    bool is_init = has_ascii_name("<init>", sizeof("<init>") - 1u);
    bool is_clinit = has_ascii_name("<clinit>", sizeof("<clinit>") - 1u);
    if (UNLIKELY(!is_init && !is_clinit)) {
      LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
    } else {
      if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
        LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
                     << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
        access_flags |= kAccConstructor;
      }
    }
  }

  // Check for nterp invoke fast-path based on shorty.
  // Scan parameter types (shorty[1..]): 'L' is a reference; 'F'/'D'/'J' disqualify
  // the reference-or-int fast path (floating point or 64-bit long arguments).
  bool all_parameters_are_reference = true;
  bool all_parameters_are_reference_or_int = true;
  for (size_t i = 1; i < shorty.length(); ++i) {
    if (shorty[i] != 'L') {
      all_parameters_are_reference = false;
      if (shorty[i] == 'F' || shorty[i] == 'D' || shorty[i] == 'J') {
        all_parameters_are_reference_or_int = false;
        break;
      }
    }
  }
  // The invoke fast path additionally requires a non-floating-point return type.
  if (all_parameters_are_reference_or_int && shorty[0] != 'F' && shorty[0] != 'D') {
    access_flags |= kAccNterpInvokeFastPathFlag;
  }

  if (UNLIKELY((access_flags & kAccNative) != 0u)) {
    // Check if the native method is annotated with @FastNative or @CriticalNative.
    const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
    access_flags |=
        annotations::GetNativeMethodAnnotationAccessFlags(dex_file, class_def, dex_method_idx);
    dst->SetAccessFlags(access_flags);
    DCHECK(!dst->IsAbstract());
    DCHECK(!dst->HasCodeItem());
    DCHECK_EQ(method.GetCodeItemOffset(), 0u);
    dst->SetDataPtrSize(nullptr, image_pointer_size_);  // JNI stub/trampoline not linked yet.
  } else if ((access_flags & kAccAbstract) != 0u) {
    dst->SetAccessFlags(access_flags);
    // Must be done after SetAccessFlags since IsAbstract depends on it.
    DCHECK(dst->IsAbstract());
    if (klass->IsInterface()) {
      dst->CalculateAndSetImtIndex();
    }
    DCHECK(!dst->HasCodeItem());
    DCHECK_EQ(method.GetCodeItemOffset(), 0u);
    dst->SetDataPtrSize(nullptr, image_pointer_size_);  // Single implementation not set yet.
  } else {
    // Concrete bytecode method.
    // Check for nterp entry fast-path based on shorty.
    if (all_parameters_are_reference) {
      access_flags |= kAccNterpEntryPointFastPathFlag;
    }
    const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
    if (annotations::MethodIsNeverCompile(dex_file, class_def, dex_method_idx)) {
      access_flags |= kAccCompileDontBother;
    }
    dst->SetAccessFlags(access_flags);
    DCHECK(!dst->IsAbstract());
    DCHECK(dst->HasCodeItem());
    uint32_t code_item_offset = method.GetCodeItemOffset();
    DCHECK_NE(code_item_offset, 0u);
    if (Runtime::Current()->IsAotCompiler()) {
      // AOT: store the raw offset; the compiler driver resolves it later.
      dst->SetDataPtrSize(reinterpret_cast32<void*>(code_item_offset), image_pointer_size_);
    } else {
      dst->SetCodeItem(dex_file.GetCodeItem(code_item_offset), dex_file.IsCompactDexFile());
    }
  }

  if (Runtime::Current()->IsZygote()) {
    dst->SetMemorySharedMethod();
  }
}
3769
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003770void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
David Srbecky33df0e32021-09-30 14:36:32 +00003771 ObjPtr<mirror::DexCache> dex_cache =
3772 AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003773 CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
Brian Carlstrom40381fb2011-10-19 14:13:40 -07003774 AppendToBootClassPath(dex_file, dex_cache);
Brian Carlstroma663ea52011-08-19 23:33:41 -07003775}
3776
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003777void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
Vladimir Markocd556b02017-02-03 11:47:34 +00003778 ObjPtr<mirror::DexCache> dex_cache) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003779 CHECK(dex_file != nullptr);
3780 CHECK(dex_cache != nullptr) << dex_file->GetLocation();
Nicolas Geoffray7913cf32022-02-16 09:28:21 +00003781 CHECK_EQ(dex_cache->GetDexFile(), dex_file) << dex_file->GetLocation();
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003782 boot_class_path_.push_back(dex_file);
Andreas Gampebe7af222017-07-25 09:57:28 -07003783 WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003784 RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07003785}
3786
// Record `dex_cache` as the canonical DexCache for `dex_file` under `class_loader`.
// The caller must hold Locks::dex_lock_ exclusively. Also opportunistically prunes
// dex_caches_ entries whose weak roots were cleared (class unloading), and triggers
// OatFile relocation initialization the first time a dex file from an executable
// OatFile is registered.
void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache,
                                        ObjPtr<mirror::ClassLoader> class_loader) {
  Thread* const self = Thread::Current();
  Locks::dex_lock_->AssertExclusiveHeld(self);
  CHECK(dex_cache != nullptr) << dex_file.GetLocation();
  CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
  // For app images, the dex cache location may be a suffix of the dex file location since the
  // dex file location is an absolute path.
  const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
  const size_t dex_cache_length = dex_cache_location.length();
  CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
  std::string dex_file_location = dex_file.GetLocation();
  // The following paths checks don't work on preopt when using boot dex files, where the dex
  // cache location is the one on device, and the dex_file's location is the one on host.
  if (!(Runtime::Current()->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
    CHECK_GE(dex_file_location.length(), dex_cache_length)
        << dex_cache_location << " " << dex_file.GetLocation();
    const std::string dex_file_suffix = dex_file_location.substr(
        dex_file_location.length() - dex_cache_length,
        dex_cache_length);
    // Example dex_cache location is SettingsProvider.apk and
    // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
    CHECK_EQ(dex_cache_location, dex_file_suffix);
  }
  const OatFile* oat_file =
      (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
  // Clean up pass to remove null dex caches; null dex caches can occur due to class unloading
  // and we are lazily removing null entries. Also check if we need to initialize OatFile data
  // (.data.bimg.rel.ro and .bss sections) needed for code execution.
  bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
  JavaVMExt* const vm = self->GetJniEnv()->GetVm();
  for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
    const DexCacheData& data = it->second;
    if (self->IsJWeakCleared(data.weak_root)) {
      // Stale entry: the DexCache was collected; drop the weak global and the map entry.
      vm->DeleteWeakGlobalRef(self, data.weak_root);
      it = dex_caches_.erase(it);
    } else {
      // If another live dex file from the same OatFile is already registered,
      // the OatFile's relocations were initialized back then.
      if (initialize_oat_file_data &&
          it->first->GetOatDexFile() != nullptr &&
          it->first->GetOatDexFile()->GetOatFile() == oat_file) {
        initialize_oat_file_data = false;  // Already initialized.
      }
      ++it;
    }
  }
  if (initialize_oat_file_data) {
    oat_file->InitializeRelocations();
  }
  // Let hiddenapi assign a domain to the newly registered dex file.
  hiddenapi::InitializeDexFileDomain(dex_file, class_loader);

  // The map holds the DexCache only weakly so unloading can reclaim it.
  jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
  DexCacheData data;
  data.weak_root = dex_cache_jweak;
  data.class_table = ClassTableForClassLoader(class_loader);
  // Expose the dex file to the native debug info infrastructure.
  AddNativeDebugInfoForDex(self, &dex_file);
  DCHECK(data.class_table != nullptr);
  // Make sure to hold the dex cache live in the class table. This case happens for the boot class
  // path dex caches without an image.
  data.class_table->InsertStrongRoot(dex_cache);
  // Make sure that the dex cache holds the classloader live.
  dex_cache->SetClassLoader(class_loader);
  if (class_loader != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(class_loader);
  }
  bool inserted = dex_caches_.emplace(&dex_file, std::move(data)).second;
  CHECK(inserted);
}
3858
Alex Light725da8f2020-02-19 14:46:33 -08003859ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
3860 return data != nullptr
3861 ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
Vladimir Markocd556b02017-02-03 11:47:34 +00003862 : nullptr;
3863}
3864
Alex Light725da8f2020-02-19 14:46:33 -08003865bool ClassLinker::IsSameClassLoader(
Vladimir Markocd556b02017-02-03 11:47:34 +00003866 ObjPtr<mirror::DexCache> dex_cache,
Alex Light725da8f2020-02-19 14:46:33 -08003867 const DexCacheData* data,
Vladimir Markocd556b02017-02-03 11:47:34 +00003868 ObjPtr<mirror::ClassLoader> class_loader) {
Alex Light725da8f2020-02-19 14:46:33 -08003869 CHECK(data != nullptr);
David Srbecky6fbcc292021-02-23 01:05:32 +00003870 DCHECK_EQ(FindDexCacheDataLocked(*dex_cache->GetDexFile()), data);
Alex Light725da8f2020-02-19 14:46:33 -08003871 return data->class_table == ClassTableForClassLoader(class_loader);
Vladimir Markocd556b02017-02-03 11:47:34 +00003872}
3873
// Register an already-initialized DexCache (e.g. one created externally) for
// `class_loader`. Debug builds verify the dex file is not yet registered.
// Note the lock ordering: class table insertion under classlinker_classes_lock_,
// then registration under dex_lock_ inside a GC critical section.
void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader) {
  SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
  Thread* self = Thread::Current();
  // Handles keep the objects visible to the GC across any suspend points below.
  StackHandleScope<2> hs(self);
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  const DexFile* dex_file = dex_cache->GetDexFile();
  DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
  if (kIsDebugBuild) {
    // Debug-only: double registration of the same dex file is a caller bug.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
    ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
    DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache thats already "
                                   << "been registered on dex file " << dex_file->GetLocation();
  }
  ClassTable* table;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(h_class_loader.Get());
  }
  // Avoid a deadlock between a garbage collecting thread running a checkpoint,
  // a thread holding the dex lock and blocking on a condition variable regarding
  // weak references access, and a thread blocking on the dex lock.
  gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
  WriterMutexLock mu(self, *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
  // Keep the dex cache live via the class table (the registration itself only
  // holds a weak root).
  table->InsertStrongRoot(h_dex_cache.Get());
  if (h_class_loader.Get() != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
  }
}
3908
Alex Lightde7f8782020-02-24 10:14:22 -08003909static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
3910 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light725da8f2020-02-19 14:46:33 -08003911 self->ThrowNewExceptionF("Ljava/lang/InternalError;",
Alex Lightde7f8782020-02-24 10:14:22 -08003912 "Attempt to register dex file %s with multiple class loaders",
3913 dex_file.GetLocation().c_str());
Alex Light725da8f2020-02-19 14:46:33 -08003914}
3915
// Return the DexCache registered for `dex_file` under `class_loader`, creating
// and registering one if needed. Returns null (with a pending exception) if the
// dex file is already registered under a *different* class loader, or on OOME.
// Uses a check / allocate / re-check pattern because allocation must not happen
// while holding dex_lock_.
ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
                                                      ObjPtr<mirror::ClassLoader> class_loader) {
  Thread* self = Thread::Current();
  ObjPtr<mirror::DexCache> old_dex_cache;
  bool registered_with_another_class_loader = false;
  {
    // Fast path: already registered for this loader.
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache != nullptr) {
      if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
        return old_dex_cache;
      } else {
        // TODO This is not very clean looking. Should maybe try to make a way to request exceptions
        // be thrown when it's safe to do so to simplify this.
        registered_with_another_class_loader = true;
      }
    }
  }
  // We need to have released the dex_lock_ to allocate safely.
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
  LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
  DCHECK(linear_alloc != nullptr);
  ClassTable* table;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(class_loader);
  }
  // Don't alloc while holding the lock, since allocation may need to
  // suspend all threads and another thread may need the dex_lock_ to
  // get to a suspend point.
  StackHandleScope<3> hs(self);
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  // May be null on OOME; handled below after re-checking registration.
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
  {
    // Avoid a deadlock between a garbage collecting thread running a checkpoint,
    // a thread holding the dex lock and blocking on a condition variable regarding
    // weak references access, and a thread blocking on the dex lock.
    gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
    WriterMutexLock mu(self, *Locks::dex_lock_);
    // Re-check: another thread may have registered while we were allocating.
    const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
    old_dex_cache = DecodeDexCacheLocked(self, old_data);
    if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
      // Do Initialize while holding dex lock to make sure two threads don't call it
      // at the same time with the same dex cache. Since the .bss is shared this can cause failing
      // DCHECK that the arrays are null.
      h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
      RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
    }
    if (old_dex_cache != nullptr) {
      // Another thread managed to initialize the dex cache faster, so use that DexCache.
      // If this thread encountered OOME, ignore it.
      DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
      self->ClearException();
      // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
      // dex_lock_.
      if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
        return old_dex_cache;
      } else {
        registered_with_another_class_loader = true;
      }
    }
  }
  if (registered_with_another_class_loader) {
    ThrowDexFileAlreadyRegisteredError(self, dex_file);
    return nullptr;
  }
  if (h_dex_cache == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Keep the new dex cache strongly reachable from the loader's class table.
  table->InsertStrongRoot(h_dex_cache.Get());
  if (h_class_loader.Get() != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
  }
  VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
  PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
  return h_dex_cache.Get();
}
4001
Vladimir Markocd556b02017-02-03 11:47:34 +00004002bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004003 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004004 return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004005}
4006
Vladimir Markocd556b02017-02-03 11:47:34 +00004007ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4008 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004009 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4010 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
Vladimir Markocd556b02017-02-03 11:47:34 +00004011 if (dex_cache != nullptr) {
4012 return dex_cache;
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07004013 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004014 // Failure, dump diagnostic and abort.
David Srbecky6fbcc292021-02-23 01:05:32 +00004015 for (const auto& entry : dex_caches_) {
4016 const DexCacheData& data = entry.second;
Alex Light725da8f2020-02-19 14:46:33 -08004017 if (DecodeDexCacheLocked(self, &data) != nullptr) {
David Srbecky6fbcc292021-02-23 01:05:32 +00004018 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004019 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004020 }
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004021 LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
David Srbecky6fbcc292021-02-23 01:05:32 +00004022 << " " << &dex_file;
Ian Rogerse0a02da2014-12-02 14:10:53 -08004023 UNREACHABLE();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004024}
4025
Santiago Aboy Solanes4b8ea5d2021-11-19 10:14:54 +00004026ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFile& oat_dex_file) {
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004027 ReaderMutexLock mu(self, *Locks::dex_lock_);
4028 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(oat_dex_file);
4029 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4030 if (dex_cache != nullptr) {
4031 return dex_cache;
4032 }
4033 // Failure, dump diagnostic and abort.
4034 for (const auto& entry : dex_caches_) {
4035 const DexCacheData& data = entry.second;
4036 if (DecodeDexCacheLocked(self, &data) != nullptr) {
4037 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4038 }
4039 }
Santiago Aboy Solanes4b8ea5d2021-11-19 10:14:54 +00004040 LOG(FATAL) << "Failed to find DexCache for OatDexFile " << oat_dex_file.GetDexFileLocation()
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004041 << " " << &oat_dex_file;
4042 UNREACHABLE();
4043}
4044
Vladimir Markocd556b02017-02-03 11:47:34 +00004045ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4046 const DexFile* dex_file = dex_cache->GetDexFile();
4047 DCHECK(dex_file != nullptr);
4048 ReaderMutexLock mu(self, *Locks::dex_lock_);
David Srbecky6fbcc292021-02-23 01:05:32 +00004049 auto it = dex_caches_.find(dex_file);
4050 if (it != dex_caches_.end()) {
4051 const DexCacheData& data = it->second;
4052 ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4053 if (registered_dex_cache != nullptr) {
4054 CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4055 return data.class_table;
Vladimir Markocd556b02017-02-03 11:47:34 +00004056 }
4057 }
4058 return nullptr;
4059}
4060
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004061const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(
Santiago Aboy Solanes4b8ea5d2021-11-19 10:14:54 +00004062 const OatDexFile& oat_dex_file) {
4063 auto it = std::find_if(dex_caches_.begin(), dex_caches_.end(), [&](const auto& entry) {
4064 return entry.first->GetOatDexFile() == &oat_dex_file;
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004065 });
4066 return it != dex_caches_.end() ? &it->second : nullptr;
4067}
4068
Alex Light725da8f2020-02-19 14:46:33 -08004069const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
David Srbecky6fbcc292021-02-23 01:05:32 +00004070 auto it = dex_caches_.find(&dex_file);
4071 return it != dex_caches_.end() ? &it->second : nullptr;
Vladimir Markocd556b02017-02-03 11:47:34 +00004072}
4073
// Allocates and fully initializes the mirror::Class for the primitive `type`,
// publishes it in the boot class table, and records it as the class root
// `primitive_root`. Runs during single-threaded startup, so no per-class lock
// is taken and the class can be marked visibly initialized immediately.
void ClassLinker::CreatePrimitiveClass(Thread* self,
                                       Primitive::Type type,
                                       ClassRoot primitive_root) {
  ObjPtr<mirror::Class> primitive_class =
      AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
  CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
  // Do not hold lock on the primitive class object, the initialization of
  // primitive classes is done while the process is still single threaded.
  primitive_class->SetAccessFlagsDuringLinking(kAccPublic | kAccFinal | kAccAbstract);
  primitive_class->SetPrimitiveType(type);
  // Primitive classes share java.lang.Object's interface table.
  primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
  primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
  const char* descriptor = Primitive::Descriptor(type);
  ObjPtr<mirror::Class> existing = InsertClass(descriptor,
                                               primitive_class,
                                               ComputeModifiedUtf8Hash(descriptor));
  // During single-threaded startup there can be no racing insertion.
  CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
  SetClassRoot(primitive_root, primitive_class);
}
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004095
Vladimir Marko02610552018-06-04 14:38:00 +01004096inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4097 return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4098}
4099
// Create an array class (i.e. the class object for the array, not the
// array itself). "descriptor" looks like "[C" or "[[[[B" or
// "[Ljava/lang/String;".
//
// If "descriptor" refers to an array of primitives, look up the
// primitive type's internally-generated class object.
//
// "class_loader" is the class loader of the class that's referring to
// us. It's used to ensure that we're looking for the element type in
// the right context. It does NOT become the class loader for the
// array class; that always comes from the base element class.
//
// Returns null with an exception raised on failure.
ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
                                                    const char* descriptor,
                                                    size_t hash,
                                                    Handle<mirror::ClassLoader> class_loader) {
  // Identify the underlying component type
  CHECK_EQ('[', descriptor[0]);
  StackHandleScope<2> hs(self);

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return nullptr;
  }

  // Resolve the component type; "descriptor + 1" strips the leading '['.
  MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
                                                                     class_loader)));
  if (component_type == nullptr) {
    DCHECK(self->IsExceptionPending());
    // We need to accept erroneous classes as component types.
    const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
    component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
    if (component_type == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    } else {
      // The component exists but is erroneous; proceed with the pending
      // exception cleared so the array class can still be formed.
      self->ClearException();
    }
  }
  if (UNLIKELY(component_type->IsPrimitiveVoid())) {
    // void[] is not a legal type.
    ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
    return nullptr;
  }
  // See if the component type is already loaded. Array classes are
  // always associated with the class loader of their underlying
  // element type -- an array of Strings goes with the loader for
  // java/lang/String -- so we need to look for it there. (The
  // caller should have checked for the existence of the class
  // before calling here, but they did so with *their* class loader,
  // not the component type's loader.)
  //
  // If we find it, the caller adds "loader" to the class' initiating
  // loader list, which should prevent us from going through this again.
  //
  // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
  // are the same, because our caller (FindClass) just did the
  // lookup. (Even if we get this wrong we still have correct behavior,
  // because we effectively do this lookup again when we add the new
  // class to the hash table --- necessary because of possible races with
  // other threads.)
  if (class_loader.Get() != component_type->GetClassLoader()) {
    ObjPtr<mirror::Class> new_class =
        LookupClass(self, descriptor, hash, component_type->GetClassLoader());
    if (new_class != nullptr) {
      return new_class;
    }
  }
  // Core array classes, i.e. Object[], Class[], String[] and primitive
  // arrays, have special initialization and they should be found above.
  DCHECK_IMPLIES(component_type->IsObjectClass(),
                 // Guard from false positives for errors before setting superclass.
                 component_type->IsErroneousUnresolved());
  DCHECK(!component_type->IsStringClass());
  DCHECK(!component_type->IsClassClass());
  DCHECK(!component_type->IsPrimitive());

  // Fill out the fields in the Class.
  //
  // It is possible to execute some methods against arrays, because
  // all arrays are subclasses of java_lang_Object_, so we need to set
  // up a vtable. We can just point at the one in java_lang_Object_.
  //
  // Array classes are simple enough that we don't need to do a full
  // link step.
  size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
  // The visitor runs inside AllocClass, initializing the object before it is
  // published anywhere.
  auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
                                                          size_t usable_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
    mirror::Class::InitializeClassVisitor init_class(array_class_size);
    init_class(obj, usable_size);
    ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
    klass->SetComponentType(component_type.Get());
    // Do not hold lock for initialization, the fence issued after the visitor
    // returns ensures memory visibility together with the implicit consume
    // semantics (for all supported architectures) for any thread that loads
    // the array class reference from any memory locations afterwards.
    FinishArrayClassSetup(klass);
  };
  auto new_class = hs.NewHandle<mirror::Class>(
      AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
  if (new_class == nullptr) {
    self->AssertPendingOOMException();
    return nullptr;
  }

  ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
  if (existing == nullptr) {
    // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
    // duplicate events in case of races. Array classes don't really follow dedicated
    // load and prepare, anyways.
    Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
    Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);

    jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
    return new_class.Get();
  }
  // Another thread must have loaded the class after we
  // started but before we finished. Abandon what we've
  // done.
  //
  // (Yes, this happens.)

  return existing;
}
4232
Vladimir Marko9186b182018-11-06 14:55:54 +00004233ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4234 ClassRoot class_root;
Ian Rogers62f05122014-03-21 11:21:29 -07004235 switch (type) {
Vladimir Marko9186b182018-11-06 14:55:54 +00004236 case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4237 case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4238 case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4239 case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4240 case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4241 case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4242 case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4243 case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4244 case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
Ian Rogers62f05122014-03-21 11:21:29 -07004245 default:
Vladimir Marko9186b182018-11-06 14:55:54 +00004246 return nullptr;
Carl Shapiro744ad052011-08-06 15:53:36 -07004247 }
Vladimir Marko9186b182018-11-06 14:55:54 +00004248 return GetClassRoot(class_root, this);
4249}
4250
4251ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4252 ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4253 if (UNLIKELY(result == nullptr)) {
4254 std::string printable_type(PrintableChar(type));
4255 ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4256 }
4257 return result;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004258}
4259
// Publishes `klass` in the class table of its defining class loader under
// `descriptor`/`hash`. Returns null on success; if another thread already
// inserted a class for this descriptor, returns that existing class instead
// (and does not insert), so the caller can discard its own copy.
ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
                                               ObjPtr<mirror::Class> klass,
                                               size_t hash) {
  DCHECK(Thread::Current()->CanLoadClasses());
  if (VLOG_IS_ON(class_linker)) {
    ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
    std::string source;
    if (dex_cache != nullptr) {
      source += " from ";
      source += dex_cache->GetLocation()->ToModifiedUtf8();
    }
    LOG(INFO) << "Loaded class " << descriptor << source;
  }
  {
    // Lookup + insert must be atomic with respect to racing inserters.
    WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
    const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
    ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
    if (existing != nullptr) {
      // Lost the race; report the winner.
      return existing;
    }
    VerifyObject(klass);
    class_table->InsertWithHash(klass, hash);
    if (class_loader != nullptr) {
      // This is necessary because we need to have the card dirtied for remembered sets.
      WriteBarrier::ForEveryFieldWrite(class_loader);
    }
    if (log_new_roots_) {
      new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
    }
  }
  if (kIsDebugBuild) {
    // Test that copied methods correctly can find their holder.
    for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
      CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
    }
  }
  return nullptr;
}
4299
Vladimir Marko1998cd02017-01-13 13:02:58 +00004300void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004301 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4302 DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4303 if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4304 new_bss_roots_boot_oat_files_.push_back(oat_file);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004305 }
4306}
4307
Alex Lighte64300b2015-12-15 15:02:47 -08004308// TODO This should really be in mirror::Class.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004309void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
Alex Lighte64300b2015-12-15 15:02:47 -08004310 LengthPrefixedArray<ArtMethod>* new_methods) {
4311 klass->SetMethodsPtrUnchecked(new_methods,
4312 klass->NumDirectMethods(),
4313 klass->NumDeclaredVirtualMethods());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004314 // Need to mark the card so that the remembered sets and mod union tables get updated.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004315 WriteBarrier::ForEveryFieldWrite(klass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004316}
4317
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004318ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4319 const char* descriptor,
4320 ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2ff3b972017-06-05 18:14:53 -07004321 return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4322}
4323
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004324ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4325 const char* descriptor,
4326 size_t hash,
4327 ObjPtr<mirror::ClassLoader> class_loader) {
Vladimir Marko1a1de672016-10-13 12:53:15 +01004328 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4329 ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4330 if (class_table != nullptr) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004331 ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
Vladimir Marko1a1de672016-10-13 12:53:15 +01004332 if (result != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004333 return result;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004334 }
Sameer Abu Asal2c6de222013-05-02 17:38:59 -07004335 }
Vladimir Marko1a1de672016-10-13 12:53:15 +01004336 return nullptr;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004337}
4338
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004339class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4340 public:
Igor Murashkin2ffb7032017-11-08 13:35:21 -08004341 MoveClassTableToPreZygoteVisitor() {}
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004342
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004343 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004344 REQUIRES(Locks::classlinker_classes_lock_)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004345 REQUIRES_SHARED(Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004346 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -07004347 if (class_table != nullptr) {
4348 class_table->FreezeSnapshot();
4349 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07004350 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004351};
4352
4353void ClassLinker::MoveClassTableToPreZygote() {
4354 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Andreas Gampe2af99022017-04-25 08:32:59 -07004355 boot_class_table_->FreezeSnapshot();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004356 MoveClassTableToPreZygoteVisitor visitor;
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07004357 VisitClassLoaders(&visitor);
Mathieu Chartierc2e20622014-11-03 11:41:47 -08004358}
4359
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004360// Look up classes by hash and descriptor and put all matching ones in the result array.
4361class LookupClassesVisitor : public ClassLoaderVisitor {
4362 public:
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004363 LookupClassesVisitor(const char* descriptor,
4364 size_t hash,
4365 std::vector<ObjPtr<mirror::Class>>* result)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004366 : descriptor_(descriptor),
4367 hash_(hash),
4368 result_(result) {}
4369
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004370 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004371 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004372 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004373 ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004374 // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4375 if (klass != nullptr && klass->GetClassLoader() == class_loader) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004376 result_->push_back(klass);
4377 }
4378 }
4379
4380 private:
4381 const char* const descriptor_;
4382 const size_t hash_;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004383 std::vector<ObjPtr<mirror::Class>>* const result_;
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004384};
4385
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004386void ClassLinker::LookupClasses(const char* descriptor,
4387 std::vector<ObjPtr<mirror::Class>>& result) {
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004388 result.clear();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004389 Thread* const self = Thread::Current();
4390 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004391 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Andreas Gampe2af99022017-04-25 08:32:59 -07004392 ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004393 if (klass != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004394 DCHECK(klass->GetClassLoader() == nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004395 result.push_back(klass);
4396 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004397 LookupClassesVisitor visitor(descriptor, hash, &result);
4398 VisitClassLoaders(&visitor);
Elliott Hughes6fa602d2011-12-02 17:54:25 -08004399}
4400
// Verifies `supertype` (a superclass or superinterface of `klass`) if it has
// not been verified yet. Returns true when `klass` may proceed with its own
// verification. On a hard failure of the supertype, marks `klass` as
// ClassStatus::kErrorResolved, raises a VerifyError (chaining any pending
// exception as its cause) and returns false.
bool ClassLinker::AttemptSupertypeVerification(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               Handle<mirror::Class> supertype) {
  DCHECK(self != nullptr);
  DCHECK(klass != nullptr);
  DCHECK(supertype != nullptr);

  if (!supertype->IsVerified() && !supertype->IsErroneous()) {
    VerifyClass(self, verifier_deps, supertype);
  }

  if (supertype->IsVerified()
      || supertype->ShouldVerifyAtRuntime()
      || supertype->IsVerifiedNeedsAccessChecks()) {
    // The supertype is either verified, or we soft failed at AOT time.
    DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
    return true;
  }
  // If we got this far then we have a hard failure.
  std::string error_msg =
      StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
                   klass->PrettyDescriptor().c_str(),
                   supertype->PrettyDescriptor().c_str());
  LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
  StackHandleScope<1> hs(self);
  // Preserve the exception raised by the supertype's verification so it can
  // be attached as the cause of the VerifyError we throw below.
  Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
  if (cause != nullptr) {
    // Set during VerifyClass call (if at all).
    self->ClearException();
  }
  // Change into a verify error.
  ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
  if (cause != nullptr) {
    self->GetException()->SetCause(cause.Get());
  }
  ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
  if (Runtime::Current()->IsAotCompiler()) {
    Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
  }
  // Need to grab the lock to change status.
  ObjectLock<mirror::Class> super_lock(self, klass);
  mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
  return false;
}
4446
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004447verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
4448 verifier::VerifierDeps* verifier_deps,
4449 Handle<mirror::Class> klass,
4450 verifier::HardFailLogMode log_level) {
Andreas Gampe884f3b82016-03-30 19:52:58 -07004451 {
4452 // TODO: assert that the monitor on the Class is held
4453 ObjectLock<mirror::Class> lock(self, klass);
Elliott Hughesd9c67be2012-02-02 19:54:06 -08004454
Andreas Gampe884f3b82016-03-30 19:52:58 -07004455 // Is somebody verifying this now?
Vladimir Marko2c64a832018-01-04 11:31:56 +00004456 ClassStatus old_status = klass->GetStatus();
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004457 while (old_status == ClassStatus::kVerifying) {
Andreas Gampe884f3b82016-03-30 19:52:58 -07004458 lock.WaitIgnoringInterrupts();
Mathieu Chartier5ef70202017-06-29 10:45:10 -07004459 // WaitIgnoringInterrupts can still receive an interrupt and return early, in this
4460 // case we may see the same status again. b/62912904. This is why the check is
4461 // greater or equal.
4462 CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
David Sehr709b0702016-10-13 09:12:37 -07004463 << "Class '" << klass->PrettyClass()
4464 << "' performed an illegal verification state transition from " << old_status
4465 << " to " << klass->GetStatus();
Andreas Gampe884f3b82016-03-30 19:52:58 -07004466 old_status = klass->GetStatus();
4467 }
jeffhao98eacac2011-09-14 16:11:53 -07004468
Andreas Gampe884f3b82016-03-30 19:52:58 -07004469 // The class might already be erroneous, for example at compile time if we attempted to verify
4470 // this class as a parent to another.
4471 if (klass->IsErroneous()) {
4472 ThrowEarlierClassFailure(klass.Get());
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004473 return verifier::FailureKind::kHardFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004474 }
Brian Carlstrom9b5ee882012-02-28 09:48:54 -08004475
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004476 // Don't attempt to re-verify if already verified.
Andreas Gampe884f3b82016-03-30 19:52:58 -07004477 if (klass->IsVerified()) {
Nicolas Geoffray80789962021-04-30 16:50:39 +01004478 if (verifier_deps != nullptr &&
4479 verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
4480 !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
4481 !Runtime::Current()->IsAotCompiler()) {
4482 // If the klass is verified, but `verifier_deps` did not record it, this
4483 // means we are running background verification of a secondary dex file.
4484 // Re-run the verifier to populate `verifier_deps`.
4485 // No need to run the verification when running on the AOT Compiler, as
4486 // the driver handles those multithreaded cases already.
4487 std::string error_msg;
4488 verifier::FailureKind failure =
4489 PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
4490 // We could have soft failures, so just check that we don't have a hard
4491 // failure.
4492 DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
4493 }
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004494 return verifier::FailureKind::kNoFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004495 }
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004496
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004497 if (klass->IsVerifiedNeedsAccessChecks()) {
4498 if (!Runtime::Current()->IsAotCompiler()) {
4499 // Mark the class as having a verification attempt to avoid re-running
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004500 // the verifier.
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004501 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4502 }
4503 return verifier::FailureKind::kAccessChecksFailure;
4504 }
4505
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004506 // For AOT, don't attempt to re-verify if we have already found we should
4507 // verify at runtime.
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004508 if (klass->ShouldVerifyAtRuntime()) {
4509 CHECK(Runtime::Current()->IsAotCompiler());
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004510 return verifier::FailureKind::kSoftFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004511 }
jeffhao98eacac2011-09-14 16:11:53 -07004512
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004513 DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
4514 mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);
Andreas Gampe884f3b82016-03-30 19:52:58 -07004515
4516 // Skip verification if disabled.
4517 if (!Runtime::Current()->IsVerificationEnabled()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00004518 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004519 UpdateClassAfterVerification(klass, image_pointer_size_, verifier::FailureKind::kNoFailure);
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004520 return verifier::FailureKind::kNoFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004521 }
Jeff Hao4a200f52014-04-01 14:58:49 -07004522 }
4523
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004524 VLOG(class_linker) << "Beginning verification for class: "
4525 << klass->PrettyDescriptor()
4526 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4527
Ian Rogers9ffb0392012-09-10 11:56:50 -07004528 // Verify super class.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004529 StackHandleScope<2> hs(self);
Alex Lightf1f10492015-10-07 16:08:36 -07004530 MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
4531 // If we have a superclass and we get a hard verification failure we can return immediately.
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004532 if (supertype != nullptr &&
4533 !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
Alex Lightf1f10492015-10-07 16:08:36 -07004534 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004535 return verifier::FailureKind::kHardFailure;
Alex Lightf1f10492015-10-07 16:08:36 -07004536 }
Ian Rogers1c5eb702012-02-01 09:18:34 -08004537
Alex Lightf1f10492015-10-07 16:08:36 -07004538 // Verify all default super-interfaces.
4539 //
4540 // (1) Don't bother if the superclass has already had a soft verification failure.
4541 //
4542 // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
4543 // recursive initialization by themselves. This is because when an interface is initialized
4544 // directly it must not initialize its superinterfaces. We are allowed to verify regardless
4545 // but choose not to for an optimization. If the interfaces is being verified due to a class
4546 // initialization (which would need all the default interfaces to be verified) the class code
4547 // will trigger the recursive verification anyway.
Andreas Gampefa4333d2017-02-14 11:10:34 -08004548 if ((supertype == nullptr || supertype->IsVerified()) // See (1)
Alex Lightf1f10492015-10-07 16:08:36 -07004549 && !klass->IsInterface()) { // See (2)
4550 int32_t iftable_count = klass->GetIfTableCount();
4551 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
4552 // Loop through all interfaces this class has defined. It doesn't matter the order.
4553 for (int32_t i = 0; i < iftable_count; i++) {
4554 iface.Assign(klass->GetIfTable()->GetInterface(i));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004555 DCHECK(iface != nullptr);
Alex Lightf1f10492015-10-07 16:08:36 -07004556 // We only care if we have default interfaces and can skip if we are already verified...
4557 if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
4558 continue;
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004559 } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
Alex Lightf1f10492015-10-07 16:08:36 -07004560 // We had a hard failure while verifying this interface. Just return immediately.
4561 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004562 return verifier::FailureKind::kHardFailure;
Alex Lightf1f10492015-10-07 16:08:36 -07004563 } else if (UNLIKELY(!iface->IsVerified())) {
4564 // We softly failed to verify the iface. Stop checking and clean up.
4565 // Put the iface into the supertype handle so we know what caused us to fail.
4566 supertype.Assign(iface.Get());
4567 break;
Ian Rogers1c5eb702012-02-01 09:18:34 -08004568 }
Ian Rogers1c5eb702012-02-01 09:18:34 -08004569 }
4570 }
4571
Alex Lightf1f10492015-10-07 16:08:36 -07004572 // At this point if verification failed, then supertype is the "first" supertype that failed
4573 // verification (without a specific order). If verification succeeded, then supertype is either
4574 // null or the original superclass of klass and is verified.
Andreas Gampefa4333d2017-02-14 11:10:34 -08004575 DCHECK(supertype == nullptr ||
Alex Lightf1f10492015-10-07 16:08:36 -07004576 supertype.Get() == klass->GetSuperClass() ||
4577 !supertype->IsVerified());
4578
Elliott Hughes634eb2e2012-03-22 16:06:28 -07004579 // Try to use verification information from the oat file, otherwise do runtime verification.
Ian Rogers4445a7e2012-10-05 17:19:13 -07004580 const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
Vladimir Marko2c64a832018-01-04 11:31:56 +00004581 ClassStatus oat_file_class_status(ClassStatus::kNotReady);
Nicolas Geoffray6df45112021-02-07 21:51:58 +00004582 bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004583
4584 VLOG(class_linker) << "Class preverified status for class "
4585 << klass->PrettyDescriptor()
4586 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4587 << ": "
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004588 << preverified
4589 << "( " << oat_file_class_status << ")";
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004590
Nicolas Geoffray44dc8a32021-06-21 15:23:49 +01004591 // If the oat file says the class had an error, re-run the verifier. That way we will either:
4592 // 1) Be successful at runtime, or
4593 // 2) Get a precise error message.
Santiago Aboy Solanes6cdabe12022-02-18 15:27:43 +00004594 DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);
Andreas Gampeec6e6c12015-11-05 20:39:56 -08004595
Ian Rogers62d6c772013-02-27 08:32:07 -08004596 std::string error_msg;
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004597 verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
jeffhaof1e6b7c2012-06-05 18:33:30 -07004598 if (!preverified) {
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004599 verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004600 } else if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks) {
4601 verifier_failure = verifier::FailureKind::kAccessChecksFailure;
jeffhaof1e6b7c2012-06-05 18:33:30 -07004602 }
Andreas Gampe884f3b82016-03-30 19:52:58 -07004603
4604 // Verification is done, grab the lock again.
4605 ObjectLock<mirror::Class> lock(self, klass);
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004606 self->AssertNoPendingException();
Andreas Gampe884f3b82016-03-30 19:52:58 -07004607
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004608 if (verifier_failure == verifier::FailureKind::kHardFailure) {
David Sehr709b0702016-10-13 09:12:37 -07004609 VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
Andreas Gampeec6e6c12015-11-05 20:39:56 -08004610 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4611 << " because: " << error_msg;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004612 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
Vladimir Marko2c64a832018-01-04 11:31:56 +00004613 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004614 return verifier_failure;
jeffhao5cfd6fb2011-09-27 13:54:29 -07004615 }
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004616
4617 // Make sure all classes referenced by catch blocks are resolved.
4618 ResolveClassExceptionHandlerTypes(klass);
4619
Nicolas Geoffray486dda02017-09-11 14:15:52 +01004620 if (Runtime::Current()->IsAotCompiler()) {
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004621 if (supertype != nullptr && supertype->ShouldVerifyAtRuntime()) {
4622 // Regardless of our own verification result, we need to verify the class
4623 // at runtime if the super class is not verified. This is required in case
4624 // we generate an app/boot image.
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004625 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4626 } else if (verifier_failure == verifier::FailureKind::kNoFailure) {
4627 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4628 } else if (verifier_failure == verifier::FailureKind::kSoftFailure ||
4629 verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
4630 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4631 } else {
4632 mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
4633 }
4634 // Notify the compiler about the verification status, in case the class
4635 // was verified implicitly (eg super class of a compiled class). When the
4636 // compiler unloads dex file after compilation, we still want to keep
4637 // verification states.
Nicolas Geoffray486dda02017-09-11 14:15:52 +01004638 Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
4639 ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004640 } else {
4641 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Nicolas Geoffray486dda02017-09-11 14:15:52 +01004642 }
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004643
4644 UpdateClassAfterVerification(klass, image_pointer_size_, verifier_failure);
Nicolas Geoffray08025182016-10-25 17:20:18 +01004645 return verifier_failure;
Andreas Gampe48498592014-09-10 19:48:05 -07004646}
4647
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004648verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004649 verifier::VerifierDeps* verifier_deps,
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004650 Handle<mirror::Class> klass,
4651 verifier::HardFailLogMode log_level,
4652 std::string* error_msg) {
4653 Runtime* const runtime = Runtime::Current();
Nicolas Geoffray7744b692021-07-06 16:19:32 +01004654 StackHandleScope<2> hs(self);
4655 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
4656 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004657 return verifier::ClassVerifier::VerifyClass(self,
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004658 verifier_deps,
Nicolas Geoffray7744b692021-07-06 16:19:32 +01004659 dex_cache->GetDexFile(),
4660 klass,
4661 dex_cache,
4662 class_loader,
4663 *klass->GetClassDef(),
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004664 runtime->GetCompilerCallbacks(),
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004665 log_level,
4666 Runtime::Current()->GetTargetSdkVersion(),
4667 error_msg);
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004668}
4669
// Tries to establish |klass|'s verification status from ahead-of-time data
// (the oat class table, falling back to the vdex file) instead of running the
// verifier at runtime.
//
// On success returns true and stores the AOT-determined status in
// |oat_file_class_status|; the caller must still inspect that status (e.g.
// kVerifiedNeedsAccessChecks). Returns false when runtime verification is
// required (no oat file, compiler policy forbids it, or the recorded status
// is not usable).
bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
                                          const DexFile& dex_file,
                                          Handle<mirror::Class> klass,
                                          ClassStatus& oat_file_class_status) {
  // If we're compiling, we can only verify the class using the oat file if
  // we are not compiling the image or if the class we're verifying is not part of
  // the compilation unit (app - dependencies). We will let the compiler callback
  // tell us about the latter.
  if (Runtime::Current()->IsAotCompiler()) {
    CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
    // We are compiling an app (not the image).
    if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
      return false;
    }
  }

  const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
  // In case we run without an image there won't be a backing oat file.
  if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
    return false;
  }

  uint16_t class_def_index = klass->GetDexClassDefIndex();
  oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
  // Fully verified at compile time: nothing more to do.
  if (oat_file_class_status >= ClassStatus::kVerified) {
    return true;
  }
  if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
    // We return that the class has already been verified, and the caller should
    // check the class status to ensure we run with access checks.
    return true;
  }

  // Check the class status with the vdex file.
  // NOTE(review): GetOatFile() was already checked non-null above, so this
  // null-check looks redundant — confirm before simplifying.
  const OatFile* oat_file = oat_dex_file->GetOatFile();
  if (oat_file != nullptr) {
    ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
    if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
      VLOG(verifier) << "Vdex verification success for " << klass->PrettyClass();
      oat_file_class_status = vdex_status;
      return true;
    }
  }

  // If we only verified a subset of the classes at compile time, we can end up with classes that
  // were resolved by the verifier.
  if (oat_file_class_status == ClassStatus::kResolved) {
    return false;
  }
  // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
  CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
      << klass->PrettyClass() << " " << dex_file.GetLocation();

  if (mirror::Class::IsErroneous(oat_file_class_status)) {
    // Compile time verification failed with a hard error. We'll re-run
    // verification, which might be successful at runtime.
    return false;
  }
  if (oat_file_class_status == ClassStatus::kNotReady) {
    // Status is uninitialized if we couldn't determine the status at compile time, for example,
    // not loading the class.
    // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
    // isn't a problem and this case shouldn't occur
    return false;
  }
  // Any other status is unexpected coming from an oat file: abort with context.
  std::string temp;
  LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
             << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
             << klass->GetDescriptor(&temp);
  UNREACHABLE();
}
4741
Alex Light5a559862016-01-29 12:24:48 -08004742void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
Alex Light51a64d52015-12-17 13:55:59 -08004743 for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
Alex Light5a559862016-01-29 12:24:48 -08004744 ResolveMethodExceptionHandlerTypes(&method);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004745 }
4746}
4747
// Eagerly resolves every exception type named in |method|'s catch handlers so
// that exception delivery never needs to trigger resolution. Resolution
// failures are deliberately swallowed (exception cleared): such handlers are
// simply ignored by the exception delivery machinery.
void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
  // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
  CodeItemDataAccessor accessor(method->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return;  // native or abstract method
  }
  if (accessor.TriesSize() == 0) {
    return;  // nothing to process
  }
  // The handler list is a leb128-encoded count followed by the packed
  // encoded_catch_handler entries (see the dex format spec).
  const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
  uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
  for (uint32_t idx = 0; idx < handlers_size; idx++) {
    CatchHandlerIterator iterator(handlers_ptr);
    for (; iterator.HasNext(); iterator.Next()) {
      // Ensure exception types are resolved so that they don't need resolution to be delivered,
      // unresolved exception types will be ignored by exception delivery
      // (an invalid index denotes a catch-all handler, which has no type).
      if (iterator.GetHandlerTypeIndex().IsValid()) {
        ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
        if (exception_type == nullptr) {
          DCHECK(Thread::Current()->IsExceptionPending());
          Thread::Current()->ClearException();
        }
      }
    }
    // Advance to the next encoded_catch_handler; the iterator knows where the
    // current entry's data ends.
    handlers_ptr = iterator.EndDataPointer();
  }
}
4775
// Creates, links and initializes the runtime class backing a
// java.lang.reflect.Proxy for the given |interfaces|.
//
// |name| is the generated class name; |methods| is the full list of candidate
// methods (filtered below to the non-private, non-static ones that are
// actually proxied) and |throws| the parallel Class[][] of declared thrown
// exceptions. Returns the linked class, or null with a pending exception on
// failure (OOME, linking error, or class loading disallowed on this thread).
ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
                                                    jstring name,
                                                    jobjectArray interfaces,
                                                    jobject loader,
                                                    jobjectArray methods,
                                                    jobjectArray throws) {
  Thread* self = soa.Self();

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit-threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return nullptr;
  }

  StackHandleScope<12> hs(self);
  // |temp_klass| is the provisional class; LinkClass() below retires it and
  // publishes the final |klass|.
  MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
      AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
  if (temp_klass == nullptr) {
    CHECK(self->IsExceptionPending());  // OOME.
    return nullptr;
  }
  DCHECK(temp_klass->GetClass() != nullptr);
  temp_klass->SetObjectSize(sizeof(mirror::Proxy));
  // Set the class access flags (public final proxy) during linking, so we do
  // not try to set the flag on the methods.
  temp_klass->SetAccessFlagsDuringLinking(kAccClassIsProxy | kAccPublic | kAccFinal);
  temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
  DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
  temp_klass->SetName(soa.Decode<mirror::String>(name));
  // Proxies share java.lang.reflect.Proxy's dex cache.
  temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
  // Object has an empty iftable, copy it for that reason.
  temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
  mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
  std::string storage;
  const char* descriptor = temp_klass->GetDescriptor(&storage);
  const size_t hash = ComputeModifiedUtf8Hash(descriptor);

  // Needs to be before we insert the class so that the allocator field is set.
  LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());

  // Insert the class before loading the fields as the field roots
  // (ArtField::declaring_class_) are only visited from the class
  // table. There can't be any suspend points between inserting the
  // class and setting the field arrays below.
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
  CHECK(existing == nullptr);

  // Instance fields are inherited, but we add a couple of static fields...
  const size_t num_fields = 2;
  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
  temp_klass->SetSFieldsPtr(sfields);

  // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
  // our proxy, so Class.getInterfaces doesn't return the flattened set.
  ArtField& interfaces_sfield = sfields->At(0);
  interfaces_sfield.SetDexFieldIndex(0);
  interfaces_sfield.SetDeclaringClass(temp_klass.Get());
  interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);

  // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
  ArtField& throws_sfield = sfields->At(1);
  throws_sfield.SetDexFieldIndex(1);
  throws_sfield.SetDeclaringClass(temp_klass.Get());
  throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);

  // Proxies have 1 direct method, the constructor
  const size_t num_direct_methods = 1;

  // The array we get passed contains all methods, including private and static
  // ones that aren't proxied. We need to filter those out since only interface
  // methods (non-private & virtual) are actually proxied.
  Handle<mirror::ObjectArray<mirror::Method>> h_methods =
      hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
  DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
      << mirror::Class::PrettyClass(h_methods->GetClass());
  // List of the actual virtual methods this class will have.
  std::vector<ArtMethod*> proxied_methods;
  // Original indices of the kept methods, used to filter 'throws' in parallel.
  std::vector<size_t> proxied_throws_idx;
  proxied_methods.reserve(h_methods->GetLength());
  proxied_throws_idx.reserve(h_methods->GetLength());
  // Filter out to only the non-private virtual methods.
  for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
    ArtMethod* m = mirror->GetArtMethod();
    if (!m->IsPrivate() && !m->IsStatic()) {
      proxied_methods.push_back(m);
      proxied_throws_idx.push_back(idx);
    }
  }
  const size_t num_virtual_methods = proxied_methods.size();
  // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
  // contains an array of all the classes each function is declared to throw.
  // This is used to wrap unexpected exceptions in a
  // UndeclaredThrowableException exception. This array is in the same order as
  // the methods array and like the methods array must be filtered to remove any
  // non-proxied methods.
  const bool has_filtered_methods =
      static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
  MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
      hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
  // Only allocate a fresh, smaller array when something was filtered out;
  // otherwise reuse the caller-provided one.
  MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
      hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
          (has_filtered_methods)
              ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
                    self, original_proxied_throws->GetClass(), num_virtual_methods)
              : original_proxied_throws.Get()))
  if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  if (has_filtered_methods) {
    // Compact the kept throws entries into the new, smaller array.
    for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
      DCHECK_LE(new_idx, orig_idx);
      proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
    }
  }

  // Create the methods array.
  LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
        self, allocator, num_direct_methods + num_virtual_methods);
  // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
  // want to throw OOM in the future.
  if (UNLIKELY(proxy_class_methods == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);

  // Create the single direct method.
  CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));

  // Create virtual method using specified prototypes.
  // TODO These should really use the iterators.
  for (size_t i = 0; i < num_virtual_methods; ++i) {
    auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
    auto* prototype = proxied_methods[i];
    CreateProxyMethod(temp_klass, prototype, virtual_method);
    DCHECK(virtual_method->GetDeclaringClass() != nullptr);
    DCHECK(prototype->GetDeclaringClass() != nullptr);
  }

  // The super class is java.lang.reflect.Proxy
  temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
  // Now effectively in the loaded state.
  mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
  self->AssertNoPendingException();

  // At this point the class is loaded. Publish a ClassLoad event.
  // Note: this may be a temporary class. It is a listener's responsibility to handle this.
  Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);

  MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
  {
    // Must hold lock on object when resolved.
    ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
    // Link the fields and virtual methods, creating vtable and iftables.
    // The new class will replace the old one in the class table.
    Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
        hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
    if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
      if (!temp_klass->IsErroneous()) {
        mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
      }
      return nullptr;
    }
  }
  CHECK(temp_klass->IsRetired());
  CHECK_NE(temp_klass.Get(), klass.Get());

  // Populate the two static fields on the final (linked) class.
  CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
  interfaces_sfield.SetObject<false>(
      klass.Get(),
      soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
  CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
  throws_sfield.SetObject<false>(
      klass.Get(),
      proxied_throws.Get());

  Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);

  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
  // See also ClassLinker::EnsureInitialized().
  if (kBitstringSubtypeCheckEnabled) {
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
  }

  VisiblyInitializedCallback* callback = nullptr;
  {
    // Lock on klass is released. Lock new class object.
    ObjectLock<mirror::Class> initialization_lock(self, klass);
    // Conservatively go through the ClassStatus::kInitialized state.
    callback = MarkClassInitialized(self, klass);
  }
  // Run the visibly-initialized callback outside the class object lock.
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }

  // Consistency checks.
  if (kIsDebugBuild) {
    CHECK(klass->GetIFieldsPtr() == nullptr);
    CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));

    for (size_t i = 0; i < num_virtual_methods; ++i) {
      auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
      CheckProxyMethod(virtual_method, proxied_methods[i]);
    }

    StackHandleScope<1> hs2(self);
    Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
    std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
                                                   decoded_name->ToModifiedUtf8().c_str()));
    CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);

    std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
                                               decoded_name->ToModifiedUtf8().c_str()));
    CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);

    CHECK_EQ(klass.Get()->GetProxyInterfaces(),
             soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
    CHECK_EQ(klass.Get()->GetProxyThrows(),
             proxied_throws.Get());
  }
  return klass.Get();
}
5006
Mathieu Chartiere401d142015-04-22 13:56:20 -07005007void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5008 // Create constructor for Proxy that must initialize the method.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005009 ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5010 CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
Przemyslaw Szczepaniakf11cd292016-08-17 17:46:38 +01005011
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005012 // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5013 // on which front-end compiler was used to build the libcore DEX files.
Alex Light6cae5ea2018-06-07 17:07:02 -07005014 ArtMethod* proxy_constructor =
5015 jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init);
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005016 DCHECK(proxy_constructor != nullptr)
5017 << "Could not find <init> method in java.lang.reflect.Proxy";
5018
Jeff Haodb8a6642014-08-14 17:18:52 -07005019 // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5020 // code_ too)
Mathieu Chartiere401d142015-04-22 13:56:20 -07005021 DCHECK(out != nullptr);
5022 out->CopyFrom(proxy_constructor, image_pointer_size_);
Vladimir Markoba118822017-06-12 15:41:56 +01005023 // Make this constructor public and fix the class to be our Proxy version.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005024 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
Vladimir Markoba118822017-06-12 15:41:56 +01005025 // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005026 out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5027 kAccPublic |
5028 kAccCompileDontBother);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005029 out->SetDeclaringClass(klass.Get());
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005030
5031 // Set the original constructor method.
5032 out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
Ian Rogersc2b44472011-12-14 21:17:17 -08005033}
5034
// Debug-time validation that `constructor` is a well-formed proxy constructor:
// a public <init>(InvocationHandler)V cloned from java.lang.reflect.Proxy
// (see CreateProxyConstructor).
void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
  CHECK(constructor->IsConstructor());
  // Inspect name/signature through the interface prototype view of the method.
  auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
  CHECK_STREQ(np->GetName(), "<init>");
  CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
  // CreateProxyConstructor rewrites the access flags to public.
  DCHECK(constructor->IsPublic());
}
5042
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005043void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005044 ArtMethod* out) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08005045 // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
Ian Rogers466bb252011-10-14 03:29:56 -07005046 // as necessary
Mathieu Chartiere401d142015-04-22 13:56:20 -07005047 DCHECK(out != nullptr);
5048 out->CopyFrom(prototype, image_pointer_size_);
Ian Rogers466bb252011-10-14 03:29:56 -07005049
Alex Lighte9dd04f2016-03-16 16:09:45 -07005050 // Set class to be the concrete proxy class.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005051 out->SetDeclaringClass(klass.Get());
Vladimir Markode0d0de2021-03-18 14:12:35 +00005052 // Clear the abstract and default flags to ensure that defaults aren't picked in
Alex Lighte9dd04f2016-03-16 16:09:45 -07005053 // preference to the invocation handler.
Vladimir Markode0d0de2021-03-18 14:12:35 +00005054 const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
Alex Lighte9dd04f2016-03-16 16:09:45 -07005055 // Make the method final.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005056 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5057 const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
Alex Lighte9dd04f2016-03-16 16:09:45 -07005058 out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5059
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005060 // Set the original interface method.
5061 out->SetDataPtrSize(prototype, image_pointer_size_);
5062
Ian Rogers466bb252011-10-14 03:29:56 -07005063 // At runtime the method looks like a reference and argument saving method, clone the code
5064 // related parameters from this method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005065 out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
Ian Rogersc2b44472011-12-14 21:17:17 -08005066}
Jesse Wilson95caa792011-10-12 18:14:17 -04005067
// Debug-time validation that `method` is a correctly constructed proxy method
// for the interface method `prototype` (see CreateProxyMethod).
void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
  // Basic consistency checks: CreateProxyMethod made the copy final and
  // invokable while the prototype itself must not be final.
  CHECK(!prototype->IsFinal());
  CHECK(method->IsFinal());
  CHECK(method->IsInvokable());

  // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
  // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
  CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
  CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
}
5079
// Conservatively determines whether `klass` may be initialized under the
// caller's constraints. `can_init_statics` == false forbids any static
// initialization work (<clinit> or encoded static values);
// `can_init_parents` == false forbids transitively initializing the superclass
// and super-interfaces with default methods. Returns true only when every
// piece of required work is permitted.
bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass, bool can_init_statics,
                                       bool can_init_parents) {
  if (can_init_statics && can_init_parents) {
    // No constraints at all, so initialization is always allowed.
    return true;
  }
  if (!can_init_statics) {
    // Check if there's a class initializer.
    ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
    if (clinit != nullptr) {
      return false;
    }
    // Check if there are encoded static values needing initialization.
    if (klass->NumStaticFields() != 0) {
      const dex::ClassDef* dex_class_def = klass->GetClassDef();
      DCHECK(dex_class_def != nullptr);
      if (dex_class_def->static_values_off_ != 0) {
        return false;
      }
    }
  }
  // If we are a class we need to initialize all interfaces with default methods when we are
  // initialized. Check all of them.
  if (!klass->IsInterface()) {
    size_t num_interfaces = klass->GetIfTableCount();
    for (size_t i = 0; i < num_interfaces; i++) {
      ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
      if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
        // An uninitialized default-method interface would have to be
        // initialized along with us, so it is subject to the same constraints.
        if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
          return false;
        }
      }
    }
  }
  // Nothing further to check for interfaces or for classes without a
  // superclass.
  if (klass->IsInterface() || !klass->HasSuperClass()) {
    return true;
  }
  ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
  if (super_class->IsInitialized()) {
    return true;
  }
  // The superclass still needs initializing: that requires permission plus a
  // recursive feasibility check.
  return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
}
5122
// Drives the full initialization of `klass` following the JLS 3rd edition,
// 12.4.2 locking protocol: verification (if still needed), superclass and
// default-method-interface initialization, eager static-field resolution,
// encoded static values, and finally <clinit>. Returns true when the class
// ends up initialized; returns false when it becomes erroneous, in which case
// an exception is pending on `self`. `can_init_statics` / `can_init_parents`
// gate what work may be performed (see CanWeInitializeClass).
bool ClassLinker::InitializeClass(Thread* self,
                                  Handle<mirror::Class> klass,
                                  bool can_init_statics,
                                  bool can_init_parents) {
  // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol

  // Are we already initialized and therefore done?
  // Note: we differ from the JLS here as we don't do this under the lock, this is benign as
  // an initialized class will never change its state.
  if (klass->IsInitialized()) {
    return true;
  }

  // Fast fail if initialization requires a full runtime. Not part of the JLS.
  if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
    return false;
  }

  self->AllowThreadSuspension();
  Runtime* const runtime = Runtime::Current();
  const bool stats_enabled = runtime->HasStatsEnabled();
  // Start time of the initialization attempt; assigned under the lock below
  // and used only when stats are enabled.
  uint64_t t0;
  {
    // Take the class-object lock for the JLS state-machine transitions.
    ObjectLock<mirror::Class> lock(self, klass);

    // Re-check under the lock in case another thread initialized ahead of us.
    if (klass->IsInitialized()) {
      return true;
    }

    // Was the class already found to be erroneous? Done under the lock to match the JLS.
    if (klass->IsErroneous()) {
      ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
      VlogClassInitializationFailure(klass);
      return false;
    }

    CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
        << klass->PrettyClass() << ": state=" << klass->GetStatus();

    if (!klass->IsVerified()) {
      VerifyClass(self, /*verifier_deps= */ nullptr, klass);
      if (!klass->IsVerified()) {
        // We failed to verify, expect either the klass to be erroneous or verification failed at
        // compile time.
        if (klass->IsErroneous()) {
          // The class is erroneous. This may be a verifier error, or another thread attempted
          // verification and/or initialization and failed. We can distinguish those cases by
          // whether an exception is already pending.
          if (self->IsExceptionPending()) {
            // Check that it's a VerifyError.
            DCHECK(IsVerifyError(self->GetException()));
          } else {
            // Check that another thread attempted initialization.
            DCHECK_NE(0, klass->GetClinitThreadId());
            DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
            // Need to rethrow the previous failure now.
            ThrowEarlierClassFailure(klass.Get(), true);
          }
          VlogClassInitializationFailure(klass);
        } else {
          // Not erroneous: only legal at compile time, where classes that need
          // runtime verification get a pre-allocated NoClassDefFoundError.
          CHECK(Runtime::Current()->IsAotCompiler());
          CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
          self->AssertNoPendingException();
          self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
        }
        self->AssertPendingException();
        return false;
      } else {
        self->AssertNoPendingException();
      }

      // A separate thread could have moved us all the way to initialized. A "simple" example
      // involves a subclass of the current class being initialized at the same time (which
      // will implicitly initialize the superclass, if scheduled that way). b/28254258
      DCHECK(!klass->IsErroneous()) << klass->GetStatus();
      if (klass->IsInitialized()) {
        return true;
      }
    }

    // If the class is ClassStatus::kInitializing, either this thread is
    // initializing higher up the stack or another thread has beat us
    // to initializing and we need to wait. Either way, this
    // invocation of InitializeClass will not be responsible for
    // running <clinit> and will return.
    if (klass->GetStatus() == ClassStatus::kInitializing) {
      // Could have got an exception during verification.
      if (self->IsExceptionPending()) {
        VlogClassInitializationFailure(klass);
        return false;
      }
      // We caught somebody else in the act; was it us?
      if (klass->GetClinitThreadId() == self->GetTid()) {
        // Yes. That's fine. Return so we can continue initializing.
        return true;
      }
      // No. That's fine. Wait for another thread to finish initializing.
      return WaitForInitializeClass(klass, self, lock);
    }

    // Try to get the oat class's status for this class if the oat file is present. The compiler
    // tries to validate superclass descriptors, and writes the result into the oat file.
    // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
    // is different at runtime than it was at compile time, the oat file is rejected. So if the
    // oat file is present, the classpaths must match, and the runtime check can be skipped.
    bool has_oat_class = false;
    const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
        ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
        : OatFile::OatClass::Invalid();
    if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
        !ValidateSuperClassDescriptors(klass)) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      return false;
    }
    self->AllowThreadSuspension();

    CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
        << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();

    // From here out other threads may observe that we're initializing and so changes of state
    // require a notification.
    klass->SetClinitThreadId(self->GetTid());
    mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);

    t0 = stats_enabled ? NanoTime() : 0u;
  }

  // Time spent initializing supertypes/interfaces; subtracted from this
  // class's own init time in the stats below.
  uint64_t t_sub = 0;

  // Initialize super classes, must be done while initializing for the JLS.
  if (!klass->IsInterface() && klass->HasSuperClass()) {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (!super_class->IsInitialized()) {
      CHECK(!super_class->IsInterface());
      CHECK(can_init_parents);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
      uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
      bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
      uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
      if (!super_initialized) {
        // The super class was verified ahead of entering initializing, we should only be here if
        // the super class became erroneous due to initialization.
        // For the case of aot compiler, the super class might also be initializing but we don't
        // want to process circular dependencies in pre-compile.
        CHECK(self->IsExceptionPending())
            << "Super class initialization failed for "
            << handle_scope_super->PrettyDescriptor()
            << " that has unexpected status " << handle_scope_super->GetStatus()
            << "\nPending exception:\n"
            << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
        ObjectLock<mirror::Class> lock(self, klass);
        // Initialization failed because the super-class is erroneous.
        mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
        return false;
      }
      t_sub = super_t1 - super_t0;
    }
  }

  if (!klass->IsInterface()) {
    // Initialize interfaces with default methods for the JLS.
    size_t num_direct_interfaces = klass->NumDirectInterfaces();
    // Only setup the (expensive) handle scope if we actually need to.
    if (UNLIKELY(num_direct_interfaces > 0)) {
      StackHandleScope<1> hs_iface(self);
      MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
      for (size_t i = 0; i < num_direct_interfaces; i++) {
        handle_scope_iface.Assign(klass->GetDirectInterface(i));
        CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
        CHECK(handle_scope_iface->IsInterface());
        if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
          // We have already done this for this interface. Skip it.
          continue;
        }
        // We cannot just call initialize class directly because we need to ensure that ALL
        // interfaces with default methods are initialized. Non-default interface initialization
        // will not affect other non-default super-interfaces.
        // This is not very precise, misses all walking.
        uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
        bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
                                                                     handle_scope_iface,
                                                                     can_init_statics,
                                                                     can_init_parents);
        uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
        if (!iface_initialized) {
          ObjectLock<mirror::Class> lock(self, klass);
          // Initialization failed because one of our interfaces with default methods is erroneous.
          mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
          return false;
        }
        t_sub += inf_t1 - inf_t0;
      }
    }
  }

  const size_t num_static_fields = klass->NumStaticFields();
  if (num_static_fields > 0) {
    const dex::ClassDef* dex_class_def = klass->GetClassDef();
    CHECK(dex_class_def != nullptr);
    StackHandleScope<3> hs(self);
    Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));

    // Eagerly fill in static fields so that we don't have to do as many expensive
    // Class::FindStaticField in ResolveField.
    for (size_t i = 0; i < num_static_fields; ++i) {
      ArtField* field = klass->GetStaticField(i);
      const uint32_t field_idx = field->GetDexFieldIndex();
      ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
      if (resolved_field == nullptr) {
        // Populating cache of a dex file which defines `klass` should always be allowed.
        DCHECK(!hiddenapi::ShouldDenyAccessToMember(
            field,
            hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
            hiddenapi::AccessMethod::kNone));
        dex_cache->SetResolvedField(field_idx, field);
      } else {
        DCHECK_EQ(field, resolved_field);
      }
    }

    // Apply the encoded static values from the dex file, if any. Fields and
    // values are matched up positionally by iterating both lists in step.
    annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
                                                                 class_loader,
                                                                 this,
                                                                 *dex_class_def);
    const DexFile& dex_file = *dex_cache->GetDexFile();

    if (value_it.HasNext()) {
      ClassAccessor accessor(dex_file, *dex_class_def);
      CHECK(can_init_statics);
      for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
        if (!value_it.HasNext()) {
          break;
        }
        ArtField* art_field = ResolveField(field.GetIndex(),
                                           dex_cache,
                                           class_loader,
                                           /* is_static= */ true);
        // Transactional writes are required when running inside an active
        // transaction (AOT initialization) so they can be rolled back.
        if (Runtime::Current()->IsActiveTransaction()) {
          value_it.ReadValueToField<true>(art_field);
        } else {
          value_it.ReadValueToField<false>(art_field);
        }
        if (self->IsExceptionPending()) {
          break;
        }
        value_it.Next();
      }
      DCHECK(self->IsExceptionPending() || !value_it.HasNext());
    }
  }


  // Run <clinit>, if present and no failure has occurred so far.
  if (!self->IsExceptionPending()) {
    ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
    if (clinit != nullptr) {
      CHECK(can_init_statics);
      JValue result;
      clinit->Invoke(self, nullptr, 0, &result, "V");
    }
  }
  self->AllowThreadSuspension();
  uint64_t t1 = stats_enabled ? NanoTime() : 0u;

  // Transition to the final status under the lock; the visibly-initialized
  // callback (if any) must be invoked after the lock is released.
  VisiblyInitializedCallback* callback = nullptr;
  bool success = true;
  {
    ObjectLock<mirror::Class> lock(self, klass);

    if (self->IsExceptionPending()) {
      WrapExceptionInInitializer(klass);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      success = false;
    } else if (Runtime::Current()->IsTransactionAborted()) {
      // The exception thrown when the transaction aborted has been caught and cleared
      // so we need to throw it again now.
      VLOG(compiler) << "Return from class initializer of "
                     << mirror::Class::PrettyDescriptor(klass.Get())
                     << " without exception while transaction was aborted: re-throw it now.";
      runtime->ThrowTransactionAbortError(self);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      success = false;
    } else {
      if (stats_enabled) {
        RuntimeStats* global_stats = runtime->GetStats();
        RuntimeStats* thread_stats = self->GetStats();
        ++global_stats->class_init_count;
        ++thread_stats->class_init_count;
        global_stats->class_init_time_ns += (t1 - t0 - t_sub);
        thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
      }
      // Set the class as initialized except if failed to initialize static fields.
      callback = MarkClassInitialized(self, klass);
      if (VLOG_IS_ON(class_linker)) {
        std::string temp;
        LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
            klass->GetLocation();
      }
    }
  }
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  return success;
}
5430
// We recursively run down the tree of interfaces. We need to do this in the order they are declared
// and perform the initialization only on those interfaces that contain default methods.
// Returns true if `iface` (and every super-interface with default methods)
// ends up initialized; on success, marks the interface as recursively
// initialized so later traversals can skip it.
bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
                                                      Handle<mirror::Class> iface,
                                                      bool can_init_statics,
                                                      bool can_init_parents) {
  CHECK(iface->IsInterface());
  size_t num_direct_ifaces = iface->NumDirectInterfaces();
  // Only create the (expensive) handle scope if we need it.
  if (UNLIKELY(num_direct_ifaces > 0)) {
    StackHandleScope<1> hs(self);
    MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
    // First we initialize all of iface's super-interfaces recursively.
    for (size_t i = 0; i < num_direct_ifaces; i++) {
      ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
      CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
      if (!super_iface->HasBeenRecursivelyInitialized()) {
        // Recursive step.
        handle_super_iface.Assign(super_iface);
        if (!InitializeDefaultInterfaceRecursive(self,
                                                 handle_super_iface,
                                                 can_init_statics,
                                                 can_init_parents)) {
          return false;
        }
      }
    }
  }

  bool result = true;
  // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
  // initialize if we don't have default methods.
  if (iface->HasDefaultMethods()) {
    result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
  }

  // Mark that this interface has undergone recursive default interface initialization so we know we
  // can skip it on any later class initializations. We do this even if we are not a default
  // interface since we can still avoid the traversal. This is purely a performance optimization.
  if (result) {
    // TODO This should be done in a better way
    // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
    //       interface. It is bad (Java) style, but not impossible. Marking the recursive
    //       initialization is a performance optimization (to avoid another idempotent visit
    //       for other implementing classes/interfaces), and can be revisited later.
    ObjectTryLock<mirror::Class> lock(self, iface);
    if (lock.Acquired()) {
      iface->SetRecursivelyInitialized();
    }
  }
  return result;
}
5483
// Blocks until another thread finishes initializing `klass` (whose monitor is held
// via `lock` on entry). Returns true once the class is observed initialized; returns
// false — leaving an exception pending or marking the class erroneous — when
// initialization failed in the other thread. The loop re-checks the status after
// every wakeup because monitor waits can be spurious.
bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
                                         Thread* self,
                                         ObjectLock<mirror::Class>& lock)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  while (true) {
    self->AssertNoPendingException();
    CHECK(!klass->IsInitialized());
    lock.WaitIgnoringInterrupts();

    // When we wake up, repeat the test for init-in-progress. If
    // there's an exception pending (only possible if
    // we were not using WaitIgnoringInterrupts), bail out.
    if (self->IsExceptionPending()) {
      WrapExceptionInInitializer(klass);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      return false;
    }
    // Spurious wakeup? Go back to waiting.
    if (klass->GetStatus() == ClassStatus::kInitializing) {
      continue;
    }
    if (klass->GetStatus() == ClassStatus::kVerified &&
        Runtime::Current()->IsAotCompiler()) {
      // Compile time initialization failed.
      return false;
    }
    if (klass->IsErroneous()) {
      // The caller wants an exception, but it was thrown in a
      // different thread. Synthesize one here.
      ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
                                klass->PrettyDescriptor().c_str());
      VlogClassInitializationFailure(klass);
      return false;
    }
    if (klass->IsInitialized()) {
      return true;
    }
    // Any status other than the ones handled above is a logic error.
    LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
               << klass->GetStatus();
  }
  UNREACHABLE();
}
5526
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005527static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5528 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005529 ArtMethod* method,
5530 ArtMethod* m)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005531 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005532 DCHECK(Thread::Current()->IsExceptionPending());
5533 DCHECK(!m->IsProxyMethod());
5534 const DexFile* dex_file = m->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005535 const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5536 const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
Andreas Gampea5b09a62016-11-17 15:21:22 -08005537 dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
David Sehr709b0702016-10-13 09:12:37 -07005538 std::string return_type = dex_file->PrettyType(return_type_idx);
5539 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005540 ThrowWrappedLinkageError(klass.Get(),
5541 "While checking class %s method %s signature against %s %s: "
5542 "Failed to resolve return type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005543 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5544 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005545 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005546 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005547 return_type.c_str(), class_loader.c_str());
5548}
5549
5550static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5551 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005552 ArtMethod* method,
5553 ArtMethod* m,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07005554 uint32_t index,
Andreas Gampea5b09a62016-11-17 15:21:22 -08005555 dex::TypeIndex arg_type_idx)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005556 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005557 DCHECK(Thread::Current()->IsExceptionPending());
5558 DCHECK(!m->IsProxyMethod());
5559 const DexFile* dex_file = m->GetDexFile();
David Sehr709b0702016-10-13 09:12:37 -07005560 std::string arg_type = dex_file->PrettyType(arg_type_idx);
5561 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005562 ThrowWrappedLinkageError(klass.Get(),
5563 "While checking class %s method %s signature against %s %s: "
5564 "Failed to resolve arg %u type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005565 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5566 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005567 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005568 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005569 index, arg_type.c_str(), class_loader.c_str());
5570}
5571
5572static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5573 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005574 ArtMethod* method,
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005575 const std::string& error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005576 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005577 ThrowLinkageError(klass.Get(),
5578 "Class %s method %s resolves differently in %s %s: %s",
David Sehr709b0702016-10-13 09:12:37 -07005579 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5580 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005581 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005582 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005583 error_msg.c_str());
5584}
5585
// Checks that `method1` and `method2` — the same vtable/iftable slot seen through
// two different class loaders — resolve their return type and every parameter type
// to the same classes. On any resolution failure or type mismatch, throws the
// appropriate LinkageError (via the helpers above) and returns false; returns true
// only when the resolved signatures are identical.
static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
                                                      Handle<mirror::Class> klass,
                                                      Handle<mirror::Class> super_klass,
                                                      ArtMethod* method1,
                                                      ArtMethod* method2)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  {
    // Handle scope keeps method1's resolved return type alive while method2's
    // return type is resolved (which may allocate/suspend).
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
    if (UNLIKELY(return_type == nullptr)) {
      ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
      return false;
    }
    ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
    if (UNLIKELY(other_return_type == nullptr)) {
      ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
      return false;
    }
    if (UNLIKELY(other_return_type != return_type.Get())) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
                                          return_type->PrettyClassAndClassLoader().c_str(),
                                          return_type.Get(),
                                          other_return_type->PrettyClassAndClassLoader().c_str(),
                                          other_return_type.Ptr()));
      return false;
    }
  }
  // A null type list means "no parameters"; treat it as equivalent to an empty list.
  const dex::TypeList* types1 = method1->GetParameterTypeList();
  const dex::TypeList* types2 = method2->GetParameterTypeList();
  if (types1 == nullptr) {
    if (types2 != nullptr && types2->Size() != 0) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Type list mismatch with %s",
                                          method2->PrettyMethod(true).c_str()));
      return false;
    }
    return true;
  } else if (UNLIKELY(types2 == nullptr)) {
    if (types1->Size() != 0) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Type list mismatch with %s",
                                          method2->PrettyMethod(true).c_str()));
      return false;
    }
    return true;
  }
  uint32_t num_types = types1->Size();
  if (UNLIKELY(num_types != types2->Size())) {
    ThrowSignatureMismatch(klass, super_klass, method1,
                           StringPrintf("Type list mismatch with %s",
                                        method2->PrettyMethod(true).c_str()));
    return false;
  }
  // Compare parameter types pairwise; each iteration needs its own handle to keep
  // method1's resolved parameter type alive across method2's resolution.
  for (uint32_t i = 0; i < num_types; ++i) {
    StackHandleScope<1> hs(self);
    dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
    Handle<mirror::Class> param_type(hs.NewHandle(
        method1->ResolveClassFromTypeIndex(param_type_idx)));
    if (UNLIKELY(param_type == nullptr)) {
      ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
                                             method1, i, param_type_idx);
      return false;
    }
    dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
    ObjPtr<mirror::Class> other_param_type =
        method2->ResolveClassFromTypeIndex(other_param_type_idx);
    if (UNLIKELY(other_param_type == nullptr)) {
      ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
                                             method2, i, other_param_type_idx);
      return false;
    }
    if (UNLIKELY(param_type.Get() != other_param_type)) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
                                          i,
                                          param_type->PrettyClassAndClassLoader().c_str(),
                                          param_type.Get(),
                                          other_param_type->PrettyClassAndClassLoader().c_str(),
                                          other_param_type.Ptr()));
      return false;
    }
  }
  return true;
}
5671
5672
// Verifies that every method `klass` inherits or overrides across a class-loader
// boundary (from its superclass vtable and from each implemented interface) has a
// signature that resolves to the same types in both loaders. Throws a LinkageError
// and returns false on the first mismatch; interfaces themselves are exempt.
bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
  if (klass->IsInterface()) {
    return true;
  }
  // Begin with the methods local to the superclass.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
  if (klass->HasSuperClass() &&
      klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
    super_klass.Assign(klass->GetSuperClass());
    for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
      auto* m = klass->GetVTableEntry(i, image_pointer_size_);
      auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
      // Only methods that klass actually overrides (different ArtMethod in the
      // same vtable slot) need the cross-loader signature check.
      if (m != super_m) {
        if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                klass,
                                                                super_klass,
                                                                m,
                                                                super_m))) {
          self->AssertPendingException();
          return false;
        }
      }
    }
  }
  // Then check every interface in the iftable whose defining loader differs.
  for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
    super_klass.Assign(klass->GetIfTable()->GetInterface(i));
    if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
      uint32_t num_methods = super_klass->NumVirtualMethods();
      for (uint32_t j = 0; j < num_methods; ++j) {
        auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
            j, image_pointer_size_);
        auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
        if (m != super_m) {
          if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                  klass,
                                                                  super_klass,
                                                                  m,
                                                                  super_m))) {
            self->AssertPendingException();
            return false;
          }
        }
      }
    }
  }
  return true;
}
5722
// Ensures `c` is initialized, running <clinit> if needed and permitted.
// Returns true if the class is (already or newly) initialized. On failure, an
// exception is left pending only when both `can_init_fields` and
// `can_init_parents` are true; otherwise any pending exception is cleared so
// best-effort callers (e.g. the AOT compiler) are not disturbed.
bool ClassLinker::EnsureInitialized(Thread* self,
                                    Handle<mirror::Class> c,
                                    bool can_init_fields,
                                    bool can_init_parents) {
  DCHECK(c != nullptr);

  if (c->IsInitialized()) {
    // If we've seen an initialized but not visibly initialized class
    // many times, request visible initialization.
    if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
      // Thanks to the x86 memory model classes skip the initialized status.
      DCHECK(c->IsVisiblyInitialized());
    } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
      if (self->IncrementMakeVisiblyInitializedCounter()) {
        MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
      }
    }
    return true;
  }
  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
  //
  // Ensure the bitstring is initialized before any of the class initialization
  // logic occurs. Once a class initializer starts running, objects can
  // escape into the heap and use the subtype checking code.
  //
  // Note: A class whose SubtypeCheckInfo is at least Initialized means it
  // can be used as a source for the IsSubClass check, and that all ancestors
  // of the class are Assigned (can be used as a target for IsSubClass check)
  // or Overflowed (can be used as a source for IsSubClass check).
  if (kBitstringSubtypeCheckEnabled) {
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
  }
  const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
  if (!success) {
    if (can_init_fields && can_init_parents) {
      CHECK(self->IsExceptionPending()) << c->PrettyClass();
    } else {
      // There may or may not be an exception pending. If there is, clear it.
      // We propagate the exception only if we can initialize fields and parents.
      self->ClearException();
    }
  } else {
    self->AssertNoPendingException();
  }
  return success;
}
5771
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005772void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
5773 ObjPtr<mirror::Class> new_class) {
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005774 DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005775 for (ArtField& field : new_class->GetIFields()) {
5776 if (field.GetDeclaringClass() == temp_class) {
5777 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005778 }
5779 }
5780
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005781 DCHECK_EQ(temp_class->NumStaticFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005782 for (ArtField& field : new_class->GetSFields()) {
5783 if (field.GetDeclaringClass() == temp_class) {
5784 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005785 }
5786 }
5787
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005788 DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005789 DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
Alex Lighte64300b2015-12-15 15:02:47 -08005790 for (auto& method : new_class->GetMethods(image_pointer_size_)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005791 if (method.GetDeclaringClass() == temp_class) {
5792 method.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005793 }
5794 }
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005795
5796 // Make sure the remembered set and mod-union tables know that we updated some of the native
5797 // roots.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07005798 WriteBarrier::ForEveryFieldWrite(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005799}
5800
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005801void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08005802 CHECK(class_loader->GetAllocator() == nullptr);
5803 CHECK(class_loader->GetClassTable() == nullptr);
5804 Thread* const self = Thread::Current();
5805 ClassLoaderData data;
Ian Rogers55256cb2017-12-21 17:07:11 -08005806 data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
Mathieu Chartier5b830502016-03-02 10:30:23 -08005807 // Create and set the class table.
5808 data.class_table = new ClassTable;
5809 class_loader->SetClassTable(data.class_table);
5810 // Create and set the linear allocator.
5811 data.allocator = Runtime::Current()->CreateLinearAlloc();
5812 class_loader->SetAllocator(data.allocator);
5813 // Add to the list so that we know to free the data later.
5814 class_loaders_.push_back(data);
5815}
5816
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005817ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier6b069532015-08-05 15:08:12 -07005818 if (class_loader == nullptr) {
Andreas Gampe2af99022017-04-25 08:32:59 -07005819 return boot_class_table_.get();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005820 }
Mathieu Chartier6b069532015-08-05 15:08:12 -07005821 ClassTable* class_table = class_loader->GetClassTable();
5822 if (class_table == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08005823 RegisterClassLoader(class_loader);
5824 class_table = class_loader->GetClassTable();
5825 DCHECK(class_table != nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07005826 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005827 return class_table;
5828}
5829
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005830ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2af99022017-04-25 08:32:59 -07005831 return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005832}
5833
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005834static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005835 REQUIRES_SHARED(Locks::mutator_lock_) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005836 while (klass->HasSuperClass()) {
5837 klass = klass->GetSuperClass();
5838 if (klass->ShouldHaveImt()) {
5839 return klass->GetImt(pointer_size);
5840 }
5841 }
5842 return nullptr;
5843}
5844
// Links a loaded class: resolves its superclass, methods (vtable/IMT), and field
// layout, then publishes it as kResolved. If the temporary `klass` turns out to be
// the wrong size (embedded tables), it is retired and replaced by a correctly-sized
// copy; in either case the final class object is stored in `*h_new_class_out`.
// Returns false (with an exception pending) on any linking failure.
// `descriptor` may be null for classes not inserted into a class table.
bool ClassLinker::LinkClass(Thread* self,
                            const char* descriptor,
                            Handle<mirror::Class> klass,
                            Handle<mirror::ObjectArray<mirror::Class>> interfaces,
                            MutableHandle<mirror::Class>* h_new_class_out) {
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  if (!LinkSuperClass(klass)) {
    return false;
  }
  ArtMethod* imt_data[ImTable::kSize];
  // If there are any new conflicts compared to super class.
  bool new_conflict = false;
  // Pre-fill every IMT slot with the "unimplemented" sentinel method.
  std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
  if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
    return false;
  }
  if (!LinkInstanceFields(self, klass)) {
    return false;
  }
  size_t class_size;
  if (!LinkStaticFields(self, klass, &class_size)) {
    return false;
  }
  CreateReferenceInstanceOffsets(klass);
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  ImTable* imt = nullptr;
  if (klass->ShouldHaveImt()) {
    // If there are any new conflicts compared to the super class we can not make a copy. There
    // can be cases where both will have a conflict method at the same slot without having the same
    // set of conflicts. In this case, we can not share the IMT since the conflict table slow path
    // will possibly create a table that is incorrect for either of the classes.
    // Same IMT with new_conflict does not happen very often.
    if (!new_conflict) {
      ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
      if (super_imt != nullptr) {
        bool imt_equals = true;
        for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
          imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
        }
        if (imt_equals) {
          // Identical to the superclass IMT: share it instead of allocating.
          imt = super_imt;
        }
      }
    }
    if (imt == nullptr) {
      LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
      imt = reinterpret_cast<ImTable*>(
          allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_)));
      if (imt == nullptr) {
        return false;
      }
      imt->Populate(imt_data, image_pointer_size_);
    }
  }

  if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
    // We don't need to retire this class as it has no embedded tables or it was created the
    // correct size during class linker initialization.
    CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();

    if (klass->ShouldHaveEmbeddedVTable()) {
      klass->PopulateEmbeddedVTable(image_pointer_size_);
    }
    if (klass->ShouldHaveImt()) {
      klass->SetImt(imt, image_pointer_size_);
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(klass);
    }

    // This will notify waiters on klass that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
    h_new_class_out->Assign(klass.Get());
  } else {
    CHECK(!klass->IsResolved());
    // Retire the temporary class and create the correctly sized resolved class.
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_new_class =
        hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
    // Set arrays to null since we don't want to have multiple classes with the same ArtField or
    // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
    // may not see any references to the target space and clean the card for a class if another
    // class had the same array pointer.
    klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
    klass->SetSFieldsPtrUnchecked(nullptr);
    klass->SetIFieldsPtrUnchecked(nullptr);
    if (UNLIKELY(h_new_class == nullptr)) {
      self->AssertPendingOOMException();
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
      return false;
    }

    CHECK_EQ(h_new_class->GetClassSize(), class_size);
    ObjectLock<mirror::Class> lock(self, h_new_class);
    FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());

    if (LIKELY(descriptor != nullptr)) {
      // Swap the temp class for the real one in the loader's class table.
      WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
      const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
      ClassTable* const table = InsertClassTableForClassLoader(class_loader);
      const ObjPtr<mirror::Class> existing =
          table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
      if (class_loader != nullptr) {
        // We updated the class in the class table, perform the write barrier so that the GC knows
        // about the change.
        WriteBarrier::ForEveryFieldWrite(class_loader);
      }
      CHECK_EQ(existing, klass.Get());
      if (log_new_roots_) {
        new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
      }
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(h_new_class);
    }

    // This will notify waiters on temp class that saw the not yet resolved class in the
    // class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);

    CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
    // This will notify waiters on new_class that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
    // Return the new class.
    h_new_class_out->Assign(h_new_class.Get());
  }
  return true;
}
5985
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07005986bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00005987 CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005988 const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
Andreas Gampea5b09a62016-11-17 15:21:22 -08005989 dex::TypeIndex super_class_idx = class_def.superclass_idx_;
5990 if (super_class_idx.IsValid()) {
Roland Levillain90328ac2016-05-18 12:25:38 +01005991 // Check that a class does not inherit from itself directly.
5992 //
5993 // TODO: This is a cheap check to detect the straightforward case
5994 // of a class extending itself (b/28685551), but we should do a
5995 // proper cycle detection on loaded classes, to detect all cases
5996 // of class circularity errors (b/28830038).
5997 if (super_class_idx == class_def.class_idx_) {
5998 ThrowClassCircularityError(klass.Get(),
5999 "Class %s extends itself",
David Sehr709b0702016-10-13 09:12:37 -07006000 klass->PrettyDescriptor().c_str());
Roland Levillain90328ac2016-05-18 12:25:38 +01006001 return false;
6002 }
6003
Vladimir Marko666ee3d2017-12-11 18:37:36 +00006004 ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006005 if (super_class == nullptr) {
Brian Carlstrom65ca0772011-09-24 16:03:08 -07006006 DCHECK(Thread::Current()->IsExceptionPending());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006007 return false;
6008 }
Ian Rogersbe125a92012-01-11 15:19:49 -08006009 // Verify
6010 if (!klass->CanAccess(super_class)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006011 ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
David Sehr709b0702016-10-13 09:12:37 -07006012 super_class->PrettyDescriptor().c_str(),
6013 klass->PrettyDescriptor().c_str());
Ian Rogersbe125a92012-01-11 15:19:49 -08006014 return false;
6015 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006016 CHECK(super_class->IsResolved());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006017 klass->SetSuperClass(super_class);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006018 }
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006019 const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006020 if (interfaces != nullptr) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006021 for (size_t i = 0; i < interfaces->Size(); i++) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08006022 dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
Vladimir Marko666ee3d2017-12-11 18:37:36 +00006023 ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006024 if (interface == nullptr) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006025 DCHECK(Thread::Current()->IsExceptionPending());
6026 return false;
6027 }
6028 // Verify
6029 if (!klass->CanAccess(interface)) {
6030 // TODO: the RI seemed to ignore this in my testing.
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006031 ThrowIllegalAccessError(klass.Get(),
6032 "Interface %s implemented by class %s is inaccessible",
David Sehr709b0702016-10-13 09:12:37 -07006033 interface->PrettyDescriptor().c_str(),
6034 klass->PrettyDescriptor().c_str());
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006035 return false;
6036 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006037 }
6038 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07006039 // Mark the class as loaded.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006040 mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006041 return true;
6042}
6043
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07006044bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006045 CHECK(!klass->IsPrimitive());
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006046 ObjPtr<mirror::Class> super = klass->GetSuperClass();
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006047 ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
6048 if (klass.Get() == object_class) {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006049 if (super != nullptr) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006050 ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006051 return false;
6052 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006053 return true;
6054 }
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006055 if (super == nullptr) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006056 ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
David Sehr709b0702016-10-13 09:12:37 -07006057 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006058 return false;
6059 }
6060 // Verify
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006061 if (klass->IsInterface() && super != object_class) {
Vladimir Marko1fcae9f2017-11-28 14:14:19 +00006062 ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
6063 return false;
6064 }
Vladimir Markob43b2d82017-07-18 17:46:38 +01006065 if (super->IsFinal()) {
6066 ThrowVerifyError(klass.Get(),
6067 "Superclass %s of %s is declared final",
6068 super->PrettyDescriptor().c_str(),
6069 klass->PrettyDescriptor().c_str());
6070 return false;
6071 }
6072 if (super->IsInterface()) {
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006073 ThrowIncompatibleClassChangeError(klass.Get(),
Vladimir Markob43b2d82017-07-18 17:46:38 +01006074 "Superclass %s of %s is an interface",
David Sehr709b0702016-10-13 09:12:37 -07006075 super->PrettyDescriptor().c_str(),
Vladimir Markob43b2d82017-07-18 17:46:38 +01006076 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006077 return false;
6078 }
6079 if (!klass->CanAccess(super)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006080 ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
David Sehr709b0702016-10-13 09:12:37 -07006081 super->PrettyDescriptor().c_str(),
6082 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006083 return false;
6084 }
Elliott Hughes20cde902011-10-04 17:37:27 -07006085
Brian Carlstromf3632832014-05-20 15:36:53 -07006086 // Inherit kAccClassIsFinalizable from the superclass in case this
6087 // class doesn't override finalize.
Elliott Hughes20cde902011-10-04 17:37:27 -07006088 if (super->IsFinalizable()) {
6089 klass->SetFinalizable();
6090 }
6091
Mathieu Chartiere4275c02015-08-06 15:34:15 -07006092 // Inherit class loader flag form super class.
6093 if (super->IsClassLoaderClass()) {
6094 klass->SetClassLoaderClass();
6095 }
6096
Elliott Hughes2da50362011-10-10 16:57:08 -07006097 // Inherit reference flags (if any) from the superclass.
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07006098 uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
Elliott Hughes2da50362011-10-10 16:57:08 -07006099 if (reference_flags != 0) {
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07006100 CHECK_EQ(klass->GetClassFlags(), 0u);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07006101 klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
Elliott Hughes2da50362011-10-10 16:57:08 -07006102 }
Elliott Hughes72ee0ae2011-10-10 17:31:28 -07006103 // Disallow custom direct subclasses of java.lang.ref.Reference.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006104 if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006105 ThrowLinkageError(klass.Get(),
Ian Rogers62d6c772013-02-27 08:32:07 -08006106 "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
David Sehr709b0702016-10-13 09:12:37 -07006107 klass->PrettyDescriptor().c_str());
Elliott Hughes72ee0ae2011-10-10 17:31:28 -07006108 return false;
6109 }
Elliott Hughes2da50362011-10-10 16:57:08 -07006110
Ian Rogers7dfb28c2013-08-22 08:18:36 -07006111 if (kIsDebugBuild) {
6112 // Ensure super classes are fully resolved prior to resolving fields..
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006113 while (super != nullptr) {
Ian Rogers7dfb28c2013-08-22 08:18:36 -07006114 CHECK(super->IsResolved());
6115 super = super->GetSuperClass();
6116 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006117 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006118 return true;
6119}
6120
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006121// Comparator for name and signature of a method, used in finding overriding methods. Implementation
6122// avoids the use of handles, if it didn't then rather than compare dex files we could compare dex
6123// caches in the implementation below.
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01006124class MethodNameAndSignatureComparator final : public ValueObject {
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006125 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -07006126 explicit MethodNameAndSignatureComparator(ArtMethod* method)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006127 REQUIRES_SHARED(Locks::mutator_lock_) :
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006128 dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006129 name_view_() {
David Sehr709b0702016-10-13 09:12:37 -07006130 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006131 }
6132
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006133 ALWAYS_INLINE std::string_view GetNameView() {
6134 if (name_view_.empty()) {
6135 name_view_ = dex_file_->StringViewByIdx(mid_->name_idx_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006136 }
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006137 return name_view_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006138 }
6139
Mathieu Chartiere401d142015-04-22 13:56:20 -07006140 bool HasSameNameAndSignature(ArtMethod* other)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006141 REQUIRES_SHARED(Locks::mutator_lock_) {
David Sehr709b0702016-10-13 09:12:37 -07006142 DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006143 const DexFile* other_dex_file = other->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006144 const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006145 if (dex_file_ == other_dex_file) {
6146 return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6147 }
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006148 return GetNameView() == other_dex_file->StringViewByIdx(other_mid.name_idx_) &&
6149 dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006150 }
6151
6152 private:
6153 // Dex file for the method to compare against.
6154 const DexFile* const dex_file_;
6155 // MethodId for the method to compare against.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006156 const dex::MethodId* const mid_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006157 // Lazily computed name from the dex file's strings.
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006158 std::string_view name_view_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006159};
6160
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006161ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006162 ArtMethod* conflict_method,
6163 ArtMethod* interface_method,
Nicolas Geoffray47213e42020-12-30 15:12:00 +00006164 ArtMethod* method) {
Andreas Gampe542451c2016-07-26 09:02:02 -07006165 ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006166 Runtime* const runtime = Runtime::Current();
6167 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006168
6169 // Create a new entry if the existing one is the shared conflict method.
Nicolas Geoffray47213e42020-12-30 15:12:00 +00006170 ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006171 ? runtime->CreateImtConflictMethod(linear_alloc)
6172 : conflict_method;
6173
6174 // Allocate a new table. Note that we will leak this table at the next conflict,
6175 // but that's a tradeoff compared to making the table fixed size.
6176 void* data = linear_alloc->Alloc(
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006177 Thread::Current(), ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table,
6178 image_pointer_size_));
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006179 if (data == nullptr) {
6180 LOG(ERROR) << "Failed to allocate conflict table";
6181 return conflict_method;
6182 }
6183 ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
6184 interface_method,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006185 method,
6186 image_pointer_size_);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006187
6188 // Do a fence to ensure threads see the data in the table before it is assigned
6189 // to the conflict method.
6190 // Note that there is a race in the presence of multiple threads and we may leak
6191 // memory from the LinearAlloc, but that's a tradeoff compared to using
6192 // atomic operations.
Orion Hodson27b96762018-03-13 16:06:57 +00006193 std::atomic_thread_fence(std::memory_order_release);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006194 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006195 return new_conflict_method;
6196}
6197
6198void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6199 ArtMethod* imt_conflict_method,
6200 ArtMethod* current_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006201 /*out*/bool* new_conflict,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006202 /*out*/ArtMethod** imt_ref) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006203 // Place method in imt if entry is empty, place conflict otherwise.
6204 if (*imt_ref == unimplemented_method) {
6205 *imt_ref = current_method;
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006206 } else if (!(*imt_ref)->IsRuntimeMethod()) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006207 // If we are not a conflict and we have the same signature and name as the imt
6208 // entry, it must be that we overwrote a superclass vtable entry.
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006209 // Note that we have checked IsRuntimeMethod, as there may be multiple different
6210 // conflict methods.
Alex Lighteb7c1442015-08-31 13:17:42 -07006211 MethodNameAndSignatureComparator imt_comparator(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006212 (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
Alex Lighteb7c1442015-08-31 13:17:42 -07006213 if (imt_comparator.HasSameNameAndSignature(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006214 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006215 *imt_ref = current_method;
6216 } else {
Alex Light9139e002015-10-09 15:59:48 -07006217 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006218 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07006219 }
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006220 } else {
6221 // Place the default conflict method. Note that there may be an existing conflict
6222 // method in the IMT, but it could be one tailored to the super class, with a
6223 // specific ImtConflictTable.
6224 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006225 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07006226 }
6227}
6228
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006229void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
David Sehr709b0702016-10-13 09:12:37 -07006230 DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
6231 DCHECK(!klass->IsTemp()) << klass->PrettyClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006232 ArtMethod* imt_data[ImTable::kSize];
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006233 Runtime* const runtime = Runtime::Current();
6234 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
6235 ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006236 std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006237 if (klass->GetIfTable() != nullptr) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006238 bool new_conflict = false;
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006239 FillIMTFromIfTable(klass->GetIfTable(),
6240 unimplemented_method,
6241 conflict_method,
6242 klass,
Andreas Gampe98ea9d92018-10-19 14:06:15 -07006243 /*create_conflict_tables=*/true,
6244 /*ignore_copied_methods=*/false,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006245 &new_conflict,
6246 &imt_data[0]);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006247 }
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006248 // Compare the IMT with the super class including the conflict methods. If they are equivalent,
6249 // we can just use the same pointer.
6250 ImTable* imt = nullptr;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006251 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006252 if (super_class != nullptr && super_class->ShouldHaveImt()) {
6253 ImTable* super_imt = super_class->GetImt(image_pointer_size_);
6254 bool same = true;
6255 for (size_t i = 0; same && i < ImTable::kSize; ++i) {
6256 ArtMethod* method = imt_data[i];
6257 ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
6258 if (method != super_method) {
6259 bool is_conflict_table = method->IsRuntimeMethod() &&
6260 method != unimplemented_method &&
6261 method != conflict_method;
6262 // Verify conflict contents.
6263 bool super_conflict_table = super_method->IsRuntimeMethod() &&
6264 super_method != unimplemented_method &&
6265 super_method != conflict_method;
6266 if (!is_conflict_table || !super_conflict_table) {
6267 same = false;
6268 } else {
6269 ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
6270 ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
6271 same = same && table1->Equals(table2, image_pointer_size_);
6272 }
6273 }
6274 }
6275 if (same) {
6276 imt = super_imt;
6277 }
6278 }
6279 if (imt == nullptr) {
6280 imt = klass->GetImt(image_pointer_size_);
6281 DCHECK(imt != nullptr);
6282 imt->Populate(imt_data, image_pointer_size_);
6283 } else {
6284 klass->SetImt(imt, image_pointer_size_);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006285 }
6286}
6287
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006288ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
6289 LinearAlloc* linear_alloc,
Andreas Gampe542451c2016-07-26 09:02:02 -07006290 PointerSize image_pointer_size) {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006291 void* data = linear_alloc->Alloc(Thread::Current(),
6292 ImtConflictTable::ComputeSize(count,
6293 image_pointer_size));
6294 return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
6295}
6296
// Convenience overload: creates a conflict table using the runtime image pointer size.
ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
  return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
}
6300
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006301void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006302 ArtMethod* unimplemented_method,
6303 ArtMethod* imt_conflict_method,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006304 ObjPtr<mirror::Class> klass,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006305 bool create_conflict_tables,
6306 bool ignore_copied_methods,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006307 /*out*/bool* new_conflict,
6308 /*out*/ArtMethod** imt) {
6309 uint32_t conflict_counts[ImTable::kSize] = {};
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006310 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006311 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006312 const size_t num_virtuals = interface->NumVirtualMethods();
6313 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6314 // Virtual methods can be larger than the if table methods if there are default methods.
6315 DCHECK_GE(num_virtuals, method_array_count);
6316 if (kIsDebugBuild) {
6317 if (klass->IsInterface()) {
6318 DCHECK_EQ(method_array_count, 0u);
6319 } else {
6320 DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
6321 }
6322 }
6323 if (method_array_count == 0) {
6324 continue;
6325 }
Vladimir Marko557fece2019-03-26 14:29:41 +00006326 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006327 for (size_t j = 0; j < method_array_count; ++j) {
6328 ArtMethod* implementation_method =
6329 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6330 if (ignore_copied_methods && implementation_method->IsCopied()) {
6331 continue;
6332 }
6333 DCHECK(implementation_method != nullptr);
6334 // Miranda methods cannot be used to implement an interface method, but they are safe to put
6335 // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
6336 // or interface methods in the IMT here they will not create extra conflicts since we compare
6337 // names and signatures in SetIMTRef.
6338 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00006339 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006340
6341 // There is only any conflicts if all of the interface methods for an IMT slot don't have
6342 // the same implementation method, keep track of this to avoid creating a conflict table in
6343 // this case.
6344
6345 // Conflict table size for each IMT slot.
6346 ++conflict_counts[imt_index];
6347
6348 SetIMTRef(unimplemented_method,
6349 imt_conflict_method,
6350 implementation_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006351 /*out*/new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006352 /*out*/&imt[imt_index]);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006353 }
6354 }
6355
6356 if (create_conflict_tables) {
6357 // Create the conflict tables.
6358 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006359 for (size_t i = 0; i < ImTable::kSize; ++i) {
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006360 size_t conflicts = conflict_counts[i];
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006361 if (imt[i] == imt_conflict_method) {
6362 ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
6363 if (new_table != nullptr) {
6364 ArtMethod* new_conflict_method =
6365 Runtime::Current()->CreateImtConflictMethod(linear_alloc);
6366 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6367 imt[i] = new_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006368 } else {
6369 LOG(ERROR) << "Failed to allocate conflict table";
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006370 imt[i] = imt_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006371 }
6372 } else {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006373 DCHECK_NE(imt[i], imt_conflict_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006374 }
6375 }
6376
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006377 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006378 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006379 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6380 // Virtual methods can be larger than the if table methods if there are default methods.
6381 if (method_array_count == 0) {
6382 continue;
6383 }
Vladimir Marko557fece2019-03-26 14:29:41 +00006384 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006385 for (size_t j = 0; j < method_array_count; ++j) {
6386 ArtMethod* implementation_method =
6387 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6388 if (ignore_copied_methods && implementation_method->IsCopied()) {
6389 continue;
6390 }
6391 DCHECK(implementation_method != nullptr);
6392 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00006393 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006394 if (!imt[imt_index]->IsRuntimeMethod() ||
6395 imt[imt_index] == unimplemented_method ||
6396 imt[imt_index] == imt_conflict_method) {
6397 continue;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006398 }
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006399 ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
6400 const size_t num_entries = table->NumEntries(image_pointer_size_);
6401 table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
6402 table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006403 }
6404 }
6405 }
6406}
6407
Vladimir Marko78f62d82022-01-10 16:25:19 +00006408namespace {
6409
Alex Lighteb7c1442015-08-31 13:17:42 -07006410// Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
6411// set.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006412static bool NotSubinterfaceOfAny(
Vladimir Marko78f62d82022-01-10 16:25:19 +00006413 const ScopedArenaHashSet<mirror::Class*>& classes,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006414 ObjPtr<mirror::Class> val)
Alex Lighteb7c1442015-08-31 13:17:42 -07006415 REQUIRES(Roles::uninterruptible_)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006416 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006417 DCHECK(val != nullptr);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006418 for (ObjPtr<mirror::Class> c : classes) {
6419 if (val->IsAssignableFrom(c)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006420 return false;
6421 }
6422 }
6423 return true;
6424}
6425
Vladimir Marko78f62d82022-01-10 16:25:19 +00006426// We record new interfaces by the index of the direct interface and the index in the
6427// direct interface's `IfTable`, or `dex::kDexNoIndex` if it's the direct interface itself.
6428struct NewInterfaceReference {
6429 uint32_t direct_interface_index;
6430 uint32_t direct_interface_iftable_index;
6431};
6432
6433class ProxyInterfacesAccessor {
6434 public:
6435 explicit ProxyInterfacesAccessor(Handle<mirror::ObjectArray<mirror::Class>> interfaces)
6436 REQUIRES_SHARED(Locks::mutator_lock_)
6437 : interfaces_(interfaces) {}
6438
6439 size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
6440 return interfaces_->GetLength();
6441 }
6442
6443 ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
6444 DCHECK_LT(index, GetLength());
6445 return interfaces_->GetWithoutChecks(index);
6446 }
6447
6448 private:
6449 Handle<mirror::ObjectArray<mirror::Class>> interfaces_;
6450};
6451
6452class NonProxyInterfacesAccessor {
6453 public:
6454 NonProxyInterfacesAccessor(ClassLinker* class_linker, Handle<mirror::Class> klass)
6455 REQUIRES_SHARED(Locks::mutator_lock_)
6456 : interfaces_(klass->GetInterfaceTypeList()),
6457 class_linker_(class_linker),
6458 klass_(klass) {
6459 DCHECK(!klass->IsProxyClass());
6460 }
6461
6462 size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
6463 return (interfaces_ != nullptr) ? interfaces_->Size() : 0u;
6464 }
6465
6466 ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
6467 DCHECK_LT(index, GetLength());
6468 dex::TypeIndex type_index = interfaces_->GetTypeItem(index).type_idx_;
6469 return class_linker_->LookupResolvedType(type_index, klass_.Get());
6470 }
6471
6472 private:
6473 const dex::TypeList* interfaces_;
6474 ClassLinker* class_linker_;
6475 Handle<mirror::Class> klass_;
6476};
6477
// Finds new interfaces to add to the interface table in addition to superclass interfaces.
//
// Interfaces in the interface table must satisfy the following constraint:
//     all I, J: Interface | I <: J implies J precedes I
// (note A <: B means that A is a subtype of B). We order this backwards so that we do not need
// to reorder superclass interfaces when new interfaces are added in subclass's interface tables.
//
// This function returns a list of references for all interfaces in the transitive
// closure of the direct interfaces that are not in the superclass interfaces.
// The entries in the list are ordered to satisfy the interface table ordering
// constraint and therefore the interface table formed by appending them to the
// superclass interface table shall also satisfy that constraint.
//
// The result is stored in `initial_storage` if it fits, otherwise it is moved to
// the heap-backed `supplemental_storage` vector; the returned ArrayRef points into
// whichever of the two currently holds the data.
template <typename InterfaceAccessor>
ALWAYS_INLINE
static ArrayRef<const NewInterfaceReference> FindNewIfTableInterfaces(
    ObjPtr<mirror::IfTable> super_iftable,
    size_t super_ifcount,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces,
    ArrayRef<NewInterfaceReference> initial_storage,
    /*out*/ScopedArenaVector<NewInterfaceReference>* supplemental_storage)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Raw `mirror::Class*` pointers are stored in the hash set below, so thread
  // suspension (and hence moving GC) must not happen for the duration of this call.
  ScopedAssertNoThreadSuspension nts(__FUNCTION__);

  // This is the set of all classes already in the iftable. Used to make checking
  // if a class has already been added quicker.
  constexpr size_t kBufferSize = 32;  // 256 bytes on 64-bit architectures.
  mirror::Class* buffer[kBufferSize];
  ScopedArenaHashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize, allocator->Adapter());
  // The first super_ifcount elements are from the superclass. We note that they are already added.
  for (size_t i = 0; i < super_ifcount; i++) {
    ObjPtr<mirror::Class> iface = super_iftable->GetInterface(i);
    DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
    classes_in_iftable.Put(iface.Ptr());
  }

  ArrayRef<NewInterfaceReference> current_storage = initial_storage;
  DCHECK_NE(current_storage.size(), 0u);
  size_t num_new_interfaces = 0u;
  // Appends a reference, doubling the backing storage (and copying from the
  // caller-provided initial buffer on the first overflow) when it is full.
  auto insert_reference = [&](uint32_t direct_interface_index,
                              uint32_t direct_interface_iface_index) {
    if (UNLIKELY(num_new_interfaces == current_storage.size())) {
      bool copy = current_storage.data() != supplemental_storage->data();
      supplemental_storage->resize(2u * num_new_interfaces);
      if (copy) {
        std::copy_n(current_storage.data(), num_new_interfaces, supplemental_storage->data());
      }
      current_storage = ArrayRef<NewInterfaceReference>(*supplemental_storage);
    }
    current_storage[num_new_interfaces] = {direct_interface_index, direct_interface_iface_index};
    ++num_new_interfaces;
  };

  for (size_t i = 0, num_interfaces = interfaces.GetLength(); i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);

    // Let us call the first filled_ifcount elements of iftable the current-iface-list.
    // At this point in the loop current-iface-list has the invariant that:
    //    for every pair of interfaces I,J within it:
    //      if index_of(I) < index_of(J) then I is not a subtype of J

    // If we have already seen this element then all of its super-interfaces must already be in the
    // current-iface-list so we can skip adding it.
    if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
      // We haven't seen this interface so add all of its super-interfaces onto the
      // current-iface-list, skipping those already on it.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        if (classes_in_iftable.find(super_interface.Ptr()) == classes_in_iftable.end()) {
          DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
          classes_in_iftable.Put(super_interface.Ptr());
          // Reference the super-interface via the direct interface's iftable slot `j`.
          insert_reference(i, j);
        }
      }
      // Add this interface reference after all of its super-interfaces.
      DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
      classes_in_iftable.Put(interface.Ptr());
      // `dex::kDexNoIndex` marks the direct interface itself rather than an iftable slot.
      insert_reference(i, dex::kDexNoIndex);
    } else if (kIsDebugBuild) {
      // Check all super-interfaces are already in the list.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        DCHECK(classes_in_iftable.find(super_interface.Ptr()) != classes_in_iftable.end())
            << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
            << ", a superinterface of " << interface->PrettyClass();
      }
    }
  }
  return ArrayRef<const NewInterfaceReference>(current_storage.data(), num_new_interfaces);
}
6570
// Builds the interface table (IfTable) for `klass` from its superclass's table plus
// any newly implemented interfaces, reusing the superclass table outright when no
// new interfaces are introduced.
//
// Returns null (with a pending exception) if a non-interface is "implemented" or
// if allocation fails.
template <typename InterfaceAccessor>
static ObjPtr<mirror::IfTable> SetupInterfaceLookupTable(
    Thread* self,
    Handle<mirror::Class> klass,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(klass->HasSuperClass());
  ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
  DCHECK(super_iftable != nullptr);
  const size_t num_interfaces = interfaces.GetLength();

  // If there are no new interfaces, return the interface table from superclass.
  // If any implementation methods are overridden, we shall copy the table and
  // the method arrays that contain any differences (copy-on-write).
  if (num_interfaces == 0) {
    return super_iftable;
  }

  // Check that every class being implemented is an interface.
  if (UNLIKELY(!CheckInterfaceList(klass, interfaces))) {
    return nullptr;
  }

  static constexpr size_t kMaxStackReferences = 16;
  NewInterfaceReference initial_storage[kMaxStackReferences];
  ScopedArenaVector<NewInterfaceReference> supplemental_storage(allocator->Adapter());
  const size_t super_ifcount = super_iftable->Count();
  ArrayRef<const NewInterfaceReference> new_interface_references =
      FindNewIfTableInterfaces(
          super_iftable,
          super_ifcount,
          allocator,
          interfaces,
          ArrayRef<NewInterfaceReference>(initial_storage),
          &supplemental_storage);

  // If all declared interfaces were already present in superclass interface table,
  // return the interface table from superclass. See above.
  if (UNLIKELY(new_interface_references.empty())) {
    return super_iftable;
  }

  // Create the interface table.
  size_t ifcount = super_ifcount + new_interface_references.size();
  ObjPtr<mirror::IfTable> iftable = AllocIfTable(self, ifcount, super_iftable->GetClass());
  if (UNLIKELY(iftable == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Fill in table with superclass's iftable.
  if (super_ifcount != 0) {
    // Reload `super_iftable` as it may have been clobbered by the allocation.
    super_iftable = klass->GetSuperClass()->GetIfTable();
    for (size_t i = 0; i != super_ifcount; i++) {
      ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
      DCHECK(super_interface != nullptr);
      iftable->SetInterface(i, super_interface);
      // Share the superclass's method arrays; they are copied later only if
      // this class overrides any of the implementations (copy-on-write).
      ObjPtr<mirror::PointerArray> method_array = super_iftable->GetMethodArrayOrNull(i);
      if (method_array != nullptr) {
        iftable->SetMethodArray(i, method_array);
      }
    }
  }
  // Fill in the table with additional interfaces.
  size_t current_index = super_ifcount;
  for (NewInterfaceReference ref : new_interface_references) {
    // Resolve the reference: either a direct interface or one of its iftable entries.
    ObjPtr<mirror::Class> direct_interface = interfaces.GetInterface(ref.direct_interface_index);
    ObjPtr<mirror::Class> new_interface = (ref.direct_interface_iftable_index != dex::kDexNoIndex)
        ? direct_interface->GetIfTable()->GetInterface(ref.direct_interface_iftable_index)
        : direct_interface;
    iftable->SetInterface(current_index, new_interface);
    ++current_index;
  }
  DCHECK_EQ(current_index, ifcount);

  if (kIsDebugBuild) {
    // Check that the iftable is ordered correctly.
    for (size_t i = 0; i < ifcount; i++) {
      ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
      for (size_t j = i + 1; j < ifcount; j++) {
        ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
        // !(if_a <: if_b)
        CHECK(!if_b->IsAssignableFrom(if_a))
            << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
            << ") extends "
            << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
            << "interface list.";
      }
    }
  }

  return iftable;
}

// Helper for SetupInterfaceLookupTable: verifies every declared "interface" really is one,
// throwing IncompatibleClassChangeError otherwise. (Extracted check; behavior unchanged.)
template <typename InterfaceAccessor>
static bool CheckInterfaceList(Handle<mirror::Class> klass, InterfaceAccessor&& interfaces)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const size_t num_interfaces = interfaces.GetLength();
  for (size_t i = 0; i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
    DCHECK(interface != nullptr);
    if (UNLIKELY(!interface->IsInterface())) {
      ThrowIncompatibleClassChangeError(klass.Get(),
                                        "Class %s implements non-interface class %s",
                                        klass->PrettyDescriptor().c_str(),
                                        interface->PrettyDescriptor().c_str());
      return false;
    }
  }
  return true;
}
6673
Alex Light1f3925d2016-09-07 12:04:20 -07006674// Check that all vtable entries are present in this class's virtuals or are the same as a
6675// superclasses vtable entry.
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006676void CheckClassOwnsVTableEntries(Thread* self,
6677 Handle<mirror::Class> klass,
6678 PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006679 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light1f3925d2016-09-07 12:04:20 -07006680 StackHandleScope<2> hs(self);
6681 Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006682 ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
Alex Light1f3925d2016-09-07 12:04:20 -07006683 Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006684 int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
Alex Lighte64300b2015-12-15 15:02:47 -08006685 for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
6686 ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
6687 CHECK(m != nullptr);
6688
Alex Lighta41a30782017-03-29 11:33:19 -07006689 if (m->GetMethodIndexDuringLinking() != i) {
6690 LOG(WARNING) << m->PrettyMethod()
6691 << " has an unexpected method index for its spot in the vtable for class"
6692 << klass->PrettyClass();
6693 }
Alex Lighte64300b2015-12-15 15:02:47 -08006694 ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
6695 auto is_same_method = [m] (const ArtMethod& meth) {
6696 return &meth == m;
6697 };
Alex Light3f980532017-03-17 15:10:32 -07006698 if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
6699 std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
6700 LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
6701 << klass->PrettyClass() << " or any of its superclasses!";
6702 }
Alex Lighte64300b2015-12-15 15:02:47 -08006703 }
6704}
6705
Alex Light1f3925d2016-09-07 12:04:20 -07006706// Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
6707// method is overridden in a subclass.
Andreas Gampea2fed082019-02-01 09:34:43 -08006708template <PointerSize kPointerSize>
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006709void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
Alex Light1f3925d2016-09-07 12:04:20 -07006710 REQUIRES_SHARED(Locks::mutator_lock_) {
6711 StackHandleScope<1> hs(self);
6712 Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
6713 int32_t num_entries = vtable->GetLength();
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006714
6715 // Observations:
6716 // * The older implementation was O(n^2) and got too expensive for apps with larger classes.
6717 // * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
6718 // for many classes outside of libcore a cross-dexfile check has to be run anyways.
6719 // * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
6720 // to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
6721 // * The single-pass algorithm will trade memory for speed, but that is OK.
6722
6723 CHECK_GT(num_entries, 0);
6724
6725 auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
6726 ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
6727 ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
6728 LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
6729 << klass->PrettyClass() << " in method " << m1->PrettyMethod()
6730 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m2) << ") and "
6731 << m2->PrettyMethod() << " (0x" << std::hex
6732 << reinterpret_cast<uintptr_t>(m2) << ")";
6733 };
6734 struct BaseHashType {
6735 static size_t HashCombine(size_t seed, size_t val) {
6736 return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
6737 }
6738 };
6739
6740 // Check assuming all entries come from the same dex file.
6741 {
6742 // Find the first interesting method and its dex file.
6743 int32_t start = 0;
6744 for (; start < num_entries; ++start) {
6745 ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
6746 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
6747 // maybe).
6748 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6749 vtable_entry->GetAccessFlags())) {
6750 continue;
6751 }
6752 break;
6753 }
6754 if (start == num_entries) {
6755 return;
6756 }
6757 const DexFile* dex_file =
6758 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
6759 GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
6760
6761 // Helper function to avoid logging if we have to run the cross-file checks.
6762 auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
6763 // Use a map to store seen entries, as the storage space is too large for a bitvector.
6764 using PairType = std::pair<uint32_t, uint16_t>;
6765 struct PairHash : BaseHashType {
6766 size_t operator()(const PairType& key) const {
6767 return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
6768 }
6769 };
Vladimir Marko782fb712020-12-23 12:47:31 +00006770 HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006771 seen.reserve(2 * num_entries);
6772 bool need_slow_path = false;
6773 bool found_dup = false;
6774 for (int i = start; i < num_entries; ++i) {
6775 // Can use Unchecked here as the start loop already ensured that the arrays are correct
6776 // wrt/ kPointerSize.
6777 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
6778 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6779 vtable_entry->GetAccessFlags())) {
6780 continue;
6781 }
6782 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
6783 if (dex_file != m->GetDexFile()) {
6784 need_slow_path = true;
6785 break;
6786 }
6787 const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
6788 PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
6789 auto it = seen.find(pair);
6790 if (it != seen.end()) {
6791 found_dup = true;
6792 if (log_warn) {
6793 log_fn(it->second, i);
6794 }
6795 } else {
Vladimir Marko782fb712020-12-23 12:47:31 +00006796 seen.insert(std::make_pair(pair, i));
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006797 }
6798 }
6799 return std::make_pair(need_slow_path, found_dup);
6800 };
6801 std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
6802 if (!result.first) {
6803 if (result.second) {
6804 check_fn(/* log_warn= */ true);
6805 }
6806 return;
6807 }
6808 }
6809
6810 // Need to check across dex files.
6811 struct Entry {
6812 size_t cached_hash = 0;
Vladimir Markoaa027b82021-01-06 20:34:20 +00006813 uint32_t name_len = 0;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006814 const char* name = nullptr;
6815 Signature signature = Signature::NoSignature();
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006816
Vladimir Marko782fb712020-12-23 12:47:31 +00006817 Entry() = default;
6818 Entry(const Entry& other) = default;
6819 Entry& operator=(const Entry& other) = default;
6820
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006821 Entry(const DexFile* dex_file, const dex::MethodId& mid)
Vladimir Markoaa027b82021-01-06 20:34:20 +00006822 : name_len(0), // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
6823 // This call writes `name_len` and it is therefore necessary that the
6824 // initializer for `name_len` comes before it, otherwise the value
6825 // from the call would be overwritten by that initializer.
6826 name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006827 signature(dex_file->GetMethodSignature(mid)) {
Vladimir Markoaa027b82021-01-06 20:34:20 +00006828 // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
6829 if (name[name_len] != 0) {
6830 name_len += strlen(name + name_len);
6831 }
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006832 }
6833
6834 bool operator==(const Entry& other) const {
Vladimir Marko782fb712020-12-23 12:47:31 +00006835 return name_len == other.name_len &&
6836 memcmp(name, other.name, name_len) == 0 &&
6837 signature == other.signature;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006838 }
6839 };
6840 struct EntryHash {
6841 size_t operator()(const Entry& key) const {
6842 return key.cached_hash;
6843 }
6844 };
Vladimir Marko782fb712020-12-23 12:47:31 +00006845 HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006846 for (int32_t i = 0; i < num_entries; ++i) {
6847 // Can use Unchecked here as the first loop already ensured that the arrays are correct
6848 // wrt/ kPointerSize.
6849 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
6850 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
6851 // maybe).
Alex Light1f3925d2016-09-07 12:04:20 -07006852 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6853 vtable_entry->GetAccessFlags())) {
6854 continue;
6855 }
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006856 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
6857 const DexFile* dex_file = m->GetDexFile();
6858 const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
6859
6860 Entry e(dex_file, mid);
6861
6862 size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
6863 size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
6864 e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
6865 sig_hash);
6866
6867 auto it = map.find(e);
6868 if (it != map.end()) {
6869 log_fn(it->second, i);
6870 } else {
Vladimir Marko782fb712020-12-23 12:47:31 +00006871 map.insert(std::make_pair(e, i));
Alex Light1f3925d2016-09-07 12:04:20 -07006872 }
6873 }
6874}
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006875
6876void CheckVTableHasNoDuplicates(Thread* self,
6877 Handle<mirror::Class> klass,
6878 PointerSize pointer_size)
Andreas Gampea2fed082019-02-01 09:34:43 -08006879 REQUIRES_SHARED(Locks::mutator_lock_) {
6880 switch (pointer_size) {
6881 case PointerSize::k64:
6882 CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
6883 break;
6884 case PointerSize::k32:
6885 CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
6886 break;
6887 }
6888}
Alex Light1f3925d2016-09-07 12:04:20 -07006889
// Runs both vtable sanity checks (entry ownership and duplicate detection).
// Diagnostic only: problems are reported through LOG(WARNING) by the callees.
static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  CheckClassOwnsVTableEntries(self, klass, pointer_size);
  CheckVTableHasNoDuplicates(self, klass, pointer_size);
}
6895
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006896} // namespace
6897
Vladimir Markob91402f2021-12-21 15:55:06 +00006898template <PointerSize kPointerSize>
Vladimir Markobc893672021-11-10 15:25:46 +00006899class ClassLinker::LinkMethodsHelper {
Vladimir Marko921094a2017-01-12 18:37:06 +00006900 public:
  // Binds the helper to the class being linked and sets up arena-backed scratch
  // storage. The arena stack draws from the runtime's linear-alloc arena pool;
  // `copied_method_records_` starts in an inline buffer and spills to the arena
  // allocator if it grows beyond `kCopiedMethodRecordInitialBufferSize`.
  LinkMethodsHelper(ClassLinker* class_linker,
                    Handle<mirror::Class> klass,
                    Thread* self,
                    Runtime* runtime)
      : class_linker_(class_linker),
        klass_(klass),
        self_(self),
        runtime_(runtime),
        stack_(runtime->GetLinearAlloc()->GetArenaPool()),
        allocator_(&stack_),
        copied_method_records_(copied_method_records_initial_buffer_,
                               kCopiedMethodRecordInitialBufferSize,
                               allocator_.Adapter()),
        num_new_copied_methods_(0u) {
  }
6916
Vladimir Marko78f62d82022-01-10 16:25:19 +00006917 // Links the virtual and interface methods for the given class.
Vladimir Markobc893672021-11-10 15:25:46 +00006918 //
6919 // Arguments:
6920 // * self - The current thread.
6921 // * klass - class, whose vtable will be filled in.
Vladimir Marko78f62d82022-01-10 16:25:19 +00006922 // * interfaces - implemented interfaces for a proxy class, otherwise null.
6923 // * out_new_conflict - whether there is a new conflict compared to the superclass.
6924 // * out_imt - interface method table to fill.
6925 bool LinkMethods(
Vladimir Markobc893672021-11-10 15:25:46 +00006926 Thread* self,
6927 Handle<mirror::Class> klass,
Vladimir Marko78f62d82022-01-10 16:25:19 +00006928 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
Vladimir Markobc893672021-11-10 15:25:46 +00006929 bool* out_new_conflict,
6930 ArtMethod** out_imt)
6931 REQUIRES_SHARED(Locks::mutator_lock_);
6932
6933 private:
Vladimir Marko0441d202022-02-18 13:55:15 +00006934 // Allocate a pointer array.
6935 static ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
6936 REQUIRES_SHARED(Locks::mutator_lock_);
6937
6938 // Allocate method arrays for interfaces.
6939 bool AllocateIfTableMethodArrays(Thread* self,
6940 Handle<mirror::Class> klass,
6941 Handle<mirror::IfTable> iftable)
6942 REQUIRES_SHARED(Locks::mutator_lock_);
6943
Vladimir Marko8670e042021-12-21 17:55:48 +00006944 // Assign vtable indexes to declared virtual methods for a non-interface class other
6945 // than `java.lang.Object`. Returns the number of vtable entries on success, 0 on failure.
Vladimir Marko19366b82022-01-18 10:41:28 +00006946 // This function also assigns vtable indexes for interface methods in new interfaces
6947 // and records data for copied methods which shall be referenced by the vtable.
Vladimir Markobed84ef2022-01-21 13:57:14 +00006948 size_t AssignVTableIndexes(ObjPtr<mirror::Class> klass,
Vladimir Marko8670e042021-12-21 17:55:48 +00006949 ObjPtr<mirror::Class> super_class,
Vladimir Marko51718132022-02-07 16:31:08 +00006950 bool is_super_abstract,
Vladimir Marko19366b82022-01-18 10:41:28 +00006951 size_t num_virtual_methods,
6952 ObjPtr<mirror::IfTable> iftable)
6953 REQUIRES_SHARED(Locks::mutator_lock_);
6954
6955 bool FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,
6956 size_t num_virtual_methods,
6957 ObjPtr<mirror::IfTable> iftable)
Vladimir Marko8670e042021-12-21 17:55:48 +00006958 REQUIRES_SHARED(Locks::mutator_lock_);
6959
Vladimir Marko78f62d82022-01-10 16:25:19 +00006960 bool LinkJavaLangObjectMethods(Thread* self, Handle<mirror::Class> klass)
Vladimir Markob91402f2021-12-21 15:55:06 +00006961 REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
6962
Vladimir Marko19366b82022-01-18 10:41:28 +00006963 void ReallocMethods(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
Vladimir Marko0441d202022-02-18 13:55:15 +00006964 bool FinalizeIfTable(Handle<mirror::Class> klass,
6965 MutableHandle<mirror::IfTable> iftable,
6966 Handle<mirror::PointerArray> vtable,
Vladimir Marko51718132022-02-07 16:31:08 +00006967 bool is_klass_abstract,
6968 bool is_super_abstract,
Vladimir Marko19366b82022-01-18 10:41:28 +00006969 bool* out_new_conflict,
6970 ArtMethod** out_imt)
Vladimir Marko78f62d82022-01-10 16:25:19 +00006971 REQUIRES_SHARED(Locks::mutator_lock_);
6972
  // Debug-build helper: overwrites the old (pre-reallocation) method array with a
  // garbage pattern (0xFE) so that any stale `ArtMethod*` still pointing into it
  // is easier to spot. No-op in release builds or when the array was not moved.
  void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
                         LengthPrefixedArray<ArtMethod>* methods) {
    if (kIsDebugBuild && old_methods != nullptr) {
      CHECK(methods != nullptr);
      // Put some random garbage in old methods to help find stale pointers.
      if (methods != old_methods) {
        // Need to make sure the GC is not running since it could be scanning the methods we are
        // about to overwrite.
        ScopedThreadStateChange tsc(self_, ThreadState::kSuspended);
        gc::ScopedGCCriticalSection gcs(self_,
                                        gc::kGcCauseClassLinker,
                                        gc::kCollectorTypeClassLinker);
        const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
                                                                            kMethodSize,
                                                                            kMethodAlignment);
        memset(old_methods, 0xFEu, old_size);
      }
    }
  }
6992
Vladimir Marko19366b82022-01-18 10:41:28 +00006993 NO_INLINE
6994 void LogNewVirtuals(LengthPrefixedArray<ArtMethod>* methods) const
6995 REQUIRES_SHARED(Locks::mutator_lock_) {
6996 ObjPtr<mirror::Class> klass = klass_.Get();
6997 size_t num_new_copied_methods = num_new_copied_methods_;
6998 size_t old_method_count = methods->size() - num_new_copied_methods;
6999 size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
7000 size_t num_miranda_methods = 0u;
7001 size_t num_overriding_default_methods = 0u;
7002 size_t num_default_methods = 0u;
7003 size_t num_overriding_default_conflict_methods = 0u;
7004 size_t num_default_conflict_methods = 0u;
7005 for (size_t i = 0; i != num_new_copied_methods; ++i) {
7006 ArtMethod& m = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7007 if (m.IsDefault()) {
7008 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7009 ++num_overriding_default_methods;
7010 } else {
7011 ++num_default_methods;
7012 }
7013 } else if (m.IsDefaultConflicting()) {
7014 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7015 ++num_overriding_default_conflict_methods;
7016 } else {
7017 ++num_default_conflict_methods;
7018 }
7019 } else {
7020 DCHECK(m.IsMiranda());
7021 ++num_miranda_methods;
7022 }
Vladimir Marko5cfb7bb2022-01-28 11:12:17 +00007023 }
Vladimir Marko19366b82022-01-18 10:41:28 +00007024 VLOG(class_linker) << klass->PrettyClass() << ": miranda_methods=" << num_miranda_methods
7025 << " default_methods=" << num_default_methods
7026 << " overriding_default_methods=" << num_overriding_default_methods
7027 << " default_conflict_methods=" << num_default_conflict_methods
Vladimir Marko921094a2017-01-12 18:37:06 +00007028 << " overriding_default_conflict_methods="
Vladimir Marko19366b82022-01-18 10:41:28 +00007029 << num_overriding_default_conflict_methods;
Vladimir Marko921094a2017-01-12 18:37:06 +00007030 }
7031
  // Empty-slot policy for hash sets of method/vtable indexes (e.g.
  // `VTableSignatureSet` below): `dex::kDexNoIndex` marks an unused bucket,
  // so that value must never be inserted as a real index.
  class MethodIndexEmptyFn {
   public:
    void MakeEmpty(uint32_t& item) const {
      item = dex::kDexNoIndex;
    }
    bool IsEmpty(const uint32_t& item) const {
      return item == dex::kDexNoIndex;
    }
  };
7041
  // Debug-build bounds checker mixed into `VTableAccessor`: remembers the vtable
  // length and CHECK-fails on any out-of-range index.
  class VTableIndexCheckerDebug {
   protected:
    explicit VTableIndexCheckerDebug(size_t vtable_length)
        : vtable_length_(vtable_length) {}

    void CheckIndex(uint32_t index) const {
      CHECK_LT(index, vtable_length_);
    }

   private:
    uint32_t vtable_length_;
  };
7054
  // Release-build counterpart of `VTableIndexCheckerDebug`: stores nothing and
  // checks nothing, so the index checks compile away entirely.
  class VTableIndexCheckerRelease {
   protected:
    explicit VTableIndexCheckerRelease(size_t vtable_length ATTRIBUTE_UNUSED) {}
    void CheckIndex(uint32_t index ATTRIBUTE_UNUSED) const {}
  };
Vladimir Marko8670e042021-12-21 17:55:48 +00007060
Vladimir Marko7ddae992022-01-18 14:27:20 +00007061 using VTableIndexChecker =
7062 std::conditional_t<kIsDebugBuild, VTableIndexCheckerDebug, VTableIndexCheckerRelease>;
7063
  // Reads `ArtMethod*` entries directly from raw vtable storage. Privately
  // inherits the debug/release index checker so release builds carry no extra
  // state beyond the raw pointer (empty-base layout).
  class VTableAccessor : private VTableIndexChecker {
   public:
    VTableAccessor(uint8_t* raw_vtable, size_t vtable_length)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : VTableIndexChecker(vtable_length),
          raw_vtable_(raw_vtable) {}

    // Returns the method at `index`, reading a 32-bit or 64-bit slot depending
    // on `kPointerSize` (compile-time parameter of the enclosing helper).
    ArtMethod* GetVTableEntry(uint32_t index) const REQUIRES_SHARED(Locks::mutator_lock_) {
      this->CheckIndex(index);
      uint8_t* entry = raw_vtable_ + static_cast<size_t>(kPointerSize) * index;
      if (kPointerSize == PointerSize::k64) {
        return reinterpret_cast64<ArtMethod*>(*reinterpret_cast<uint64_t*>(entry));
      } else {
        return reinterpret_cast32<ArtMethod*>(*reinterpret_cast<uint32_t*>(entry));
      }
    }

   private:
    uint8_t* raw_vtable_;
  };
7084
  // Hash functor for `VTableSignatureSet`. Hashes either an `ArtMethod*`
  // directly or a stored vtable index (resolved through `accessor_`), enabling
  // heterogeneous lookup of methods by signature without temporary entries.
  class VTableSignatureHash {
   public:
    explicit VTableSignatureHash(VTableAccessor accessor)
        REQUIRES_SHARED(Locks::mutator_lock_)
        : accessor_(accessor) {}

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(method);
    }

    // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
    size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
      return ComputeMethodHash(accessor_.GetVTableEntry(index));
    }

   private:
    VTableAccessor accessor_;
  };
7104
Vladimir Marko8670e042021-12-21 17:55:48 +00007105 class VTableSignatureEqual {
7106 public:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007107 explicit VTableSignatureEqual(VTableAccessor accessor)
Vladimir Marko8670e042021-12-21 17:55:48 +00007108 REQUIRES_SHARED(Locks::mutator_lock_)
7109 : accessor_(accessor) {}
7110
7111 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7112 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
Vladimir Markobed84ef2022-01-21 13:57:14 +00007113 return MethodSignatureEquals(accessor_.GetVTableEntry(lhs_index), rhs);
Vladimir Marko8670e042021-12-21 17:55:48 +00007114 }
7115
7116 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7117 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7118 return (*this)(lhs_index, accessor_.GetVTableEntry(rhs_index));
7119 }
7120
7121 private:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007122 VTableAccessor accessor_;
Vladimir Marko8670e042021-12-21 17:55:48 +00007123 };
7124
  // Set of vtable indexes, hashed and compared by the signature of the method
  // stored at each index (see `VTableSignatureHash`/`VTableSignatureEqual`).
  using VTableSignatureSet =
      ScopedArenaHashSet<uint32_t, MethodIndexEmptyFn, VTableSignatureHash, VTableSignatureEqual>;
Vladimir Marko8670e042021-12-21 17:55:48 +00007127
Vladimir Marko19366b82022-01-18 10:41:28 +00007128 class DeclaredVirtualSignatureHash {
7129 public:
7130 explicit DeclaredVirtualSignatureHash(ObjPtr<mirror::Class> klass)
7131 REQUIRES_SHARED(Locks::mutator_lock_)
7132 : klass_(klass) {}
7133
7134 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7135 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7136 return ComputeMethodHash(method);
7137 }
7138
7139 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7140 size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7141 DCHECK_LT(index, klass_->NumDeclaredVirtualMethods());
7142 ArtMethod* method = klass_->GetVirtualMethodDuringLinking(index, kPointerSize);
7143 return ComputeMethodHash(method->GetInterfaceMethodIfProxy(kPointerSize));
7144 }
7145
7146 private:
7147 ObjPtr<mirror::Class> klass_;
7148 };
7149
7150 class DeclaredVirtualSignatureEqual {
7151 public:
7152 explicit DeclaredVirtualSignatureEqual(ObjPtr<mirror::Class> klass)
7153 REQUIRES_SHARED(Locks::mutator_lock_)
7154 : klass_(klass) {}
7155
7156 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7157 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7158 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7159 ArtMethod* lhs = klass_->GetVirtualMethodDuringLinking(lhs_index, kPointerSize);
7160 return MethodSignatureEquals(lhs->GetInterfaceMethodIfProxy(kPointerSize), rhs);
7161 }
7162
7163 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7164 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7165 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7166 DCHECK_LT(rhs_index, klass_->NumDeclaredVirtualMethods());
7167 return lhs_index == rhs_index;
7168 }
7169
7170 private:
7171 ObjPtr<mirror::Class> klass_;
7172 };
7173
  // Set of indexes into the class' declared virtual methods, hashed and compared
  // by method signature (see `DeclaredVirtualSignatureHash`/`...Equal`).
  using DeclaredVirtualSignatureSet = ScopedArenaHashSet<uint32_t,
                                                         MethodIndexEmptyFn,
                                                         DeclaredVirtualSignatureHash,
                                                         DeclaredVirtualSignatureEqual>;
7178
  // Helper class to keep records for determining the correct copied method to create.
  // Each record tracks one method signature: its current "main" method, the vtable
  // index it will occupy, and a state machine that decides whether the copied method
  // becomes a miranda (abstract), default, or default-conflict method, or whether a
  // copied method inherited from the superclass can be reused (`kUseSuperMethod`).
  class CopiedMethodRecord {
   public:
    enum class State : uint32_t {
      // Note: The `*Single` values are used when we know that there is only one interface
      // method with the given signature that's not masked; that method is the main method.
      // We use this knowledge for faster masking check, otherwise we need to search for
      // a masking method through methods of all interfaces that could potentially mask it.
      kAbstractSingle,
      kDefaultSingle,
      kAbstract,
      kDefault,
      kDefaultConflict,
      kUseSuperMethod,
    };

    // Default-constructed records have a null main method; `CopiedMethodRecordEmptyFn`
    // treats such records as empty hash set slots.
    CopiedMethodRecord()
        : main_method_(nullptr),
          method_index_(0u),
          state_(State::kAbstractSingle) {}

    CopiedMethodRecord(ArtMethod* main_method, size_t vtable_index)
        : main_method_(main_method),
          method_index_(vtable_index),
          state_(State::kAbstractSingle) {}

    // Set main method. The new main method must be more specific implementation.
    void SetMainMethod(ArtMethod* main_method) {
      DCHECK(main_method_ != nullptr);
      main_method_ = main_method;
    }

    // The main method is the first encountered default method if any,
    // otherwise the first encountered abstract method.
    ArtMethod* GetMainMethod() const {
      return main_method_;
    }

    // Record the vtable index reserved for this copied method.
    void SetMethodIndex(size_t method_index) {
      DCHECK_NE(method_index, dex::kDexNoIndex);
      method_index_ = method_index;
    }

    size_t GetMethodIndex() const {
      DCHECK_NE(method_index_, dex::kDexNoIndex);
      return method_index_;
    }

    void SetState(State state) {
      state_ = state;
    }

    State GetState() const {
      return state_;
    }

    // Process a default method found in a newly seen interface when this record's
    // state is already `kDefault` or `kDefaultConflict` (enforced by the DCHECKs):
    // a second unmasked default method promotes the state to `kDefaultConflict`.
    ALWAYS_INLINE
    void UpdateStateForInterface(ObjPtr<mirror::Class> iface,
                                 ArtMethod* interface_method,
                                 ObjPtr<mirror::IfTable> iftable,
                                 size_t ifcount,
                                 size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      DCHECK(iface == interface_method->GetDeclaringClass());
      DCHECK(iface == iftable->GetInterface(index));
      DCHECK(interface_method->IsDefault());
      if (GetState() != State::kDefaultConflict) {
        DCHECK(GetState() == State::kDefault);
        // We do not record all overriding methods, so we need to walk over all
        // interfaces that could mask the `interface_method`.
        if (ContainsOverridingMethodOf(iftable, index + 1, ifcount, iface, interface_method)) {
          return;  // Found an overriding method that masks `interface_method`.
        }
        // We have a new default method that's not masked by any other method.
        SetState(State::kDefaultConflict);
      }
    }

    // Process an interface method found while scanning interface tables. Delegates
    // to `UpdateStateImpl()` with a masking predicate that searches iftable method
    // arrays after `index` for a subinterface implementing this vtable slot.
    ALWAYS_INLINE
    void UpdateState(ObjPtr<mirror::Class> iface,
                     ArtMethod* interface_method,
                     size_t vtable_index,
                     ObjPtr<mirror::IfTable> iftable,
                     size_t ifcount,
                     size_t index)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK_EQ(ifcount, iftable->Count());
      DCHECK_LT(index, ifcount);
      if (kIsDebugBuild) {
        if (interface_method->IsCopied()) {
          // Called from `FinalizeState()` for a default method from superclass.
          // The `index` points to the last interface inherited from the superclass
          // as we need to search only the new interfaces for masking methods.
          DCHECK(interface_method->IsDefault());
        } else {
          DCHECK(iface == interface_method->GetDeclaringClass());
          DCHECK(iface == iftable->GetInterface(index));
        }
      }
      DCHECK_EQ(vtable_index, method_index_);
      auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
        return ContainsImplementingMethod(iftable, index + 1, ifcount, iface, vtable_index);
      };
      UpdateStateImpl(iface, interface_method, slow_is_masked);
    }

    // Merge state inherited from a copied method in the superclass (default,
    // default-conflicting, or miranda). Sets `kUseSuperMethod` when the
    // superclass copy can be reused unchanged for this class.
    ALWAYS_INLINE
    void FinalizeState(ArtMethod* super_method,
                       size_t vtable_index,
                       ObjPtr<mirror::IfTable> iftable,
                       size_t ifcount,
                       ObjPtr<mirror::IfTable> super_iftable,
                       size_t super_ifcount)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(super_method->IsCopied());
      DCHECK_EQ(vtable_index, method_index_);
      DCHECK_EQ(vtable_index, super_method->GetMethodIndex());
      DCHECK_NE(super_ifcount, 0u);
      if (super_method->IsDefault()) {
        if (UNLIKELY(super_method->IsDefaultConflicting())) {
          // Some of the default methods that contributed to the conflict in the superclass
          // may be masked by new interfaces. Walk over all the interfaces and update state
          // as long as the current state is not `kDefaultConflict`.
          size_t i = super_ifcount;
          while (GetState() != State::kDefaultConflict && i != 0u) {
            --i;
            ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
            DCHECK(iface == super_iftable->GetInterface(i));
            auto [found, index] =
                MethodArrayContains(super_iftable->GetMethodArrayOrNull(i), super_method);
            if (found) {
              ArtMethod* interface_method = iface->GetVirtualMethod(index, kPointerSize);
              auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
                // Note: The `iftable` has method arrays in range [super_ifcount, ifcount) filled
                // with vtable indexes but the range [0, super_ifcount) is empty, so we need to
                // use the `super_iftable` filled with implementation methods for that range.
                return ContainsImplementingMethod(
                           super_iftable, i + 1u, super_ifcount, iface, super_method) ||
                       ContainsImplementingMethod(
                           iftable, super_ifcount, ifcount, iface, vtable_index);
              };
              UpdateStateImpl(iface, interface_method, slow_is_masked);
            }
          }
          if (GetState() == State::kDefaultConflict) {
            SetState(State::kUseSuperMethod);
          }
        } else {
          // There was exactly one default method in superclass interfaces that was
          // not masked by subinterfaces. Use `UpdateState()` to process it and pass
          // `super_ifcount - 1` as index for checking if it's been masked by new interfaces.
          ObjPtr<mirror::Class> iface = super_method->GetDeclaringClass();
          UpdateState(
              iface, super_method, vtable_index, iftable, ifcount, /*index=*/ super_ifcount - 1u);
          if (GetMainMethod() == super_method) {
            DCHECK(GetState() == State::kDefault) << enum_cast<uint32_t>(GetState());
            SetState(State::kUseSuperMethod);
          }
        }
      } else {
        DCHECK(super_method->IsMiranda());
        // Any default methods with this signature in superclass interfaces have been
        // masked by subinterfaces. Check if we can reuse the miranda method.
        if (GetState() == State::kAbstractSingle || GetState() == State::kAbstract) {
          SetState(State::kUseSuperMethod);
        }
      }
    }

   private:
    // Shared state-machine transition: decide whether `interface_method` is masked,
    // leaves the state unchanged, becomes the new main method (`kDefault`), or
    // causes a default-method conflict. `slow_is_masked` performs the expensive
    // masking search and is invoked only when the cheap checks are inconclusive.
    template <typename Predicate>
    ALWAYS_INLINE
    void UpdateStateImpl(ObjPtr<mirror::Class> iface,
                         ArtMethod* interface_method,
                         Predicate&& slow_is_masked)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      bool have_default = false;
      switch (GetState()) {
        case State::kDefaultSingle:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstractSingle:
          if (GetMainMethod()->GetDeclaringClass()->Implements(iface)) {
            return;  // The main method masks the `interface_method`.
          }
          if (!interface_method->IsDefault()) {
            SetState(have_default ? State::kDefault : State::kAbstract);
            return;
          }
          break;
        case State::kDefault:
          have_default = true;
          FALLTHROUGH_INTENDED;
        case State::kAbstract:
          if (!interface_method->IsDefault()) {
            return;  // Keep the same state. We do not need to check for masking.
          }
          // We do not record all overriding methods, so we need to walk over all
          // interfaces that could mask the `interface_method`. The provided
          // predicate `slow_is_masked()` does that.
          if (slow_is_masked()) {
            return;  // Found an overriding method that masks `interface_method`.
          }
          break;
        case State::kDefaultConflict:
          return;  // The state cannot change anymore.
        default:
          LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(GetState());
          UNREACHABLE();
      }
      // We have a new default method that's not masked by any other method.
      DCHECK(interface_method->IsDefault());
      if (have_default) {
        SetState(State::kDefaultConflict);
      } else {
        SetMainMethod(interface_method);
        SetState(State::kDefault);
      }
    }

    // Determine if the given `iftable` contains in the given range a subinterface of `iface`
    // that declares a method with the same name and signature as 'interface_method'.
    //
    // Arguments
    //  - iftable: The iftable we are searching for an overriding method.
    //  - begin:   The start of the range to search.
    //  - end:     The end of the range to search.
    //  - iface:   The interface we are checking to see if anything overrides.
    //  - interface_method:
    //             The interface method providing a name and signature we're searching for.
    //
    // Returns whether an overriding method was found in any subinterface of `iface`.
    static bool ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,
                                           size_t begin,
                                           size_t end,
                                           ObjPtr<mirror::Class> iface,
                                           ArtMethod* interface_method)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      for (size_t i = begin; i != end; ++i) {
        ObjPtr<mirror::Class> current_iface = iftable->GetInterface(i);
        for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(kPointerSize)) {
          if (MethodSignatureEquals(&current_method, interface_method)) {
            // Check if the i'th interface is a subtype of this one.
            if (current_iface->Implements(iface)) {
              return true;
            }
            break;
          }
        }
      }
      return false;
    }

    // Determine if the given `iftable` contains in the given range a subinterface of `iface`
    // that declares a method implemented by 'target'. This is an optimized version of
    // `ContainsOverridingMethodOf()` that searches implementation method arrays instead
    // of comparing signatures for declared interface methods.
    //
    // Arguments
    //  - iftable: The iftable we are searching for an overriding method.
    //  - begin:   The start of the range to search.
    //  - end:     The end of the range to search.
    //  - iface:   The interface we are checking to see if anything overrides.
    //  - target:  The implementation method we're searching for.
    //             Note that the new `iftable` is filled with vtable indexes for new interfaces,
    //             so this needs to be the vtable index if we're searching that range.
    //
    // Returns whether the `target` was found in a method array for any subinterface of `iface`.
    template <typename TargetType>
    static bool ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,
                                           size_t begin,
                                           size_t end,
                                           ObjPtr<mirror::Class> iface,
                                           TargetType target)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      for (size_t i = begin; i != end; ++i) {
        if (MethodArrayContains(iftable->GetMethodArrayOrNull(i), target).first &&
            iftable->GetInterface(i)->Implements(iface)) {
          return true;
        }
      }
      return false;
    }

    // Linear search of a (possibly null) method array for `target`.
    // Returns {found, index-of-first-match}; the index is 0 when not found.
    template <typename TargetType>
    static std::pair<bool, size_t> MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,
                                                       TargetType target)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
      for (size_t j = 0; j != num_methods; ++j) {
        if (method_array->GetElementPtrSize<TargetType, kPointerSize>(j) == target) {
          return {true, j};
        }
      }
      return {false, 0};
    }

    ArtMethod* main_method_;
    uint32_t method_index_;
    State state_;
  };
7482
7483 class CopiedMethodRecordEmptyFn {
7484 public:
7485 void MakeEmpty(CopiedMethodRecord& item) const {
7486 item = CopiedMethodRecord();
7487 }
7488 bool IsEmpty(const CopiedMethodRecord& item) const {
7489 return item.GetMainMethod() == nullptr;
7490 }
7491 };
7492
7493 class CopiedMethodRecordHash {
7494 public:
7495 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7496 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7497 DCHECK(method != nullptr);
7498 return ComputeMethodHash(method);
7499 }
7500
7501 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7502 size_t operator()(const CopiedMethodRecord& record) const NO_THREAD_SAFETY_ANALYSIS {
7503 return (*this)(record.GetMainMethod());
7504 }
7505 };
7506
7507 class CopiedMethodRecordEqual {
7508 public:
7509 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7510 bool operator()(const CopiedMethodRecord& lhs_record,
7511 ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7512 ArtMethod* lhs = lhs_record.GetMainMethod();
7513 DCHECK(lhs != nullptr);
7514 DCHECK(rhs != nullptr);
7515 return MethodSignatureEquals(lhs, rhs);
7516 }
7517
7518 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7519 bool operator()(const CopiedMethodRecord& lhs_record,
7520 const CopiedMethodRecord& rhs_record) const NO_THREAD_SAFETY_ANALYSIS {
7521 return (*this)(lhs_record, rhs_record.GetMainMethod());
7522 }
7523 };
7524
  // Hash set of copied-method records, keyed by main-method signature
  // (see `CopiedMethodRecordHash`/`CopiedMethodRecordEqual`).
  using CopiedMethodRecordSet = ScopedArenaHashSet<CopiedMethodRecord,
                                                   CopiedMethodRecordEmptyFn,
                                                   CopiedMethodRecordHash,
                                                   CopiedMethodRecordEqual>;
7529
  // Size and alignment of an `ArtMethod` entry for the current pointer size.
  static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
  static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);

  ClassLinker* class_linker_;
  Handle<mirror::Class> klass_;  // The class whose methods are being linked.
  Thread* const self_;
  Runtime* const runtime_;

  // These are allocated on the heap to begin, we then transfer to linear alloc when we re-create
  // the virtual methods array.
  // Need to use low 4GB arenas for compiler or else the pointers won't fit in 32 bit method array
  // during cross compilation.
  // Use the linear alloc pool since this one is in the low 4gb for the compiler.
  ArenaStack stack_;
  ScopedArenaAllocator allocator_;

  // If there are multiple methods with the same signature in the superclass vtable
  // (which can happen with a new virtual method having the same signature as an
  // inaccessible package-private method from another package in the superclass),
  // we keep singly-linked lists in this single array that maps vtable index to the
  // next vtable index in the list, `dex::kDexNoIndex` denotes the end of a list.
  ArrayRef<uint32_t> same_signature_vtable_lists_;

  // Avoid large allocation for a few copied method records.
  // Keep the initial buffer on the stack to avoid arena allocations
  // if there are no special cases (the first arena allocation is costly).
  static constexpr size_t kCopiedMethodRecordInitialBufferSize = 16u;
  CopiedMethodRecord copied_method_records_initial_buffer_[kCopiedMethodRecordInitialBufferSize];
  CopiedMethodRecordSet copied_method_records_;
  size_t num_new_copied_methods_;
Vladimir Marko921094a2017-01-12 18:37:06 +00007560};
7561
// Reallocate the class' method array to append the new copied methods
// (miranda / default / default-conflicting) collected in `copied_method_records_`,
// assigning access flags and method indexes according to each record's state.
template <PointerSize kPointerSize>
NO_INLINE
void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror::Class> klass) {
  // There should be no thread suspension in this function,
  // native allocations do not cause thread suspension.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  size_t num_new_copied_methods = num_new_copied_methods_;
  DCHECK_NE(num_new_copied_methods, 0u);
  const size_t old_method_count = klass->NumMethods();
  const size_t new_method_count = old_method_count + num_new_copied_methods;

  // Attempt to realloc to save RAM if possible.
  LengthPrefixedArray<ArtMethod>* old_methods = klass->GetMethodsPtr();
  // The Realloced virtual methods aren't visible from the class roots, so there is no issue
  // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
  // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
  // CopyFrom has internal read barriers.
  //
  // TODO We should maybe move some of this into mirror::Class or at least into another method.
  const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
  auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
      class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader())->Realloc(
          self_, old_methods, old_methods_ptr_size, new_size));
  CHECK(methods != nullptr);  // Native allocation failure aborts.

  if (methods != old_methods) {
    StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
    // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
    // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
    for (auto& m : klass->GetMethods(kPointerSize)) {
      out->CopyFrom(&m, kPointerSize);
      ++out;
    }
  }

  // Collect and sort copied method records by the vtable index. This places overriding
  // copied methods first, sorted by the vtable index already assigned in the superclass,
  // followed by copied methods with new signatures in the order in which we encountered
  // them when going over virtual methods of new interfaces.
  // This order is deterministic but implementation-defined.
  //
  // Avoid arena allocation for a few records (the first arena allocation is costly).
  constexpr size_t kSortedRecordsBufferSize = 16;
  CopiedMethodRecord* sorted_records_buffer[kSortedRecordsBufferSize];
  CopiedMethodRecord** sorted_records = (num_new_copied_methods <= kSortedRecordsBufferSize)
      ? sorted_records_buffer
      : allocator_.AllocArray<CopiedMethodRecord*>(num_new_copied_methods);
  size_t filled_sorted_records = 0u;
  for (CopiedMethodRecord& record : copied_method_records_) {
    // Records in state `kUseSuperMethod` reuse the superclass copy; no new method is created.
    if (record.GetState() != CopiedMethodRecord::State::kUseSuperMethod) {
      DCHECK_LT(filled_sorted_records, num_new_copied_methods);
      sorted_records[filled_sorted_records] = &record;
      ++filled_sorted_records;
    }
  }
  DCHECK_EQ(filled_sorted_records, num_new_copied_methods);
  std::sort(sorted_records,
            sorted_records + num_new_copied_methods,
            [](const CopiedMethodRecord* lhs, const CopiedMethodRecord* rhs) {
              return lhs->GetMethodIndex() < rhs->GetMethodIndex();
            });

  if (klass->IsInterface()) {
    // Some records may have been pruned. Update method indexes in collected records.
    size_t interface_method_index = klass->NumDeclaredVirtualMethods();
    for (size_t i = 0; i != num_new_copied_methods; ++i) {
      CopiedMethodRecord* record = sorted_records[i];
      DCHECK_LE(interface_method_index, record->GetMethodIndex());
      record->SetMethodIndex(interface_method_index);
      ++interface_method_index;
    }
  }

  // Add copied methods.
  methods->SetSize(new_method_count);
  for (size_t i = 0; i != num_new_copied_methods; ++i) {
    const CopiedMethodRecord* record = sorted_records[i];
    ArtMethod* interface_method = record->GetMainMethod();
    DCHECK(!interface_method->IsCopied());
    ArtMethod& new_method = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
    new_method.CopyFrom(interface_method, kPointerSize);
    new_method.SetMethodIndex(dchecked_integral_cast<uint16_t>(record->GetMethodIndex()));
    switch (record->GetState()) {
      case CopiedMethodRecord::State::kAbstractSingle:
      case CopiedMethodRecord::State::kAbstract: {
        DCHECK(!klass->IsInterface());  // We do not create miranda methods for interfaces.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccAbstract | kAccIntrinsic | kAccDefault), kAccAbstract)
            << "Miranda method should be abstract but not intrinsic or default!";
        new_method.SetAccessFlags(access_flags | kAccCopied);
        break;
      }
      case CopiedMethodRecord::State::kDefaultSingle:
      case CopiedMethodRecord::State::kDefault: {
        DCHECK(!klass->IsInterface());  // We do not copy default methods for interfaces.
        // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks.
        // TODO This is rather arbitrary. We should maybe support classes where only some of its
        // methods are skip_access_checks.
        DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
        constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
        new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
        break;
      }
      case CopiedMethodRecord::State::kDefaultConflict: {
        // This is a type of default method (there are default method impls, just a conflict)
        // so mark this as a default. We use the `kAccAbstract` flag to distinguish it from
        // invokable copied default method without using a separate access flag but the default
        // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall
        // return false. Also clear the kAccSkipAccessChecks bit since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks. Also clear
        // potential kAccSingleImplementation to avoid CHA trying to inline the default method.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccNative | kAccIntrinsic), 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
        constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
        new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
        DCHECK(new_method.IsDefaultConflicting());
        DCHECK(!new_method.IsAbstract());
        // The actual method might or might not be marked abstract since we just copied it from
        // a (possibly default) interface method. We need to set its entry point to be the bridge
        // so that the compiler will not invoke the implementation of whatever method we copied
        // from.
        EnsureThrowsInvocationError(class_linker_, &new_method);
        break;
      }
      default:
        LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(record->GetState());
        UNREACHABLE();
    }
  }

  if (VLOG_IS_ON(class_linker)) {
    LogNewVirtuals(methods);
  }

  class_linker_->UpdateClassMethods(klass, methods);
}
7708
Vladimir Markob91402f2021-12-21 15:55:06 +00007709template <PointerSize kPointerSize>
Vladimir Marko0441d202022-02-18 13:55:15 +00007710bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
7711 Handle<mirror::Class> klass,
7712 MutableHandle<mirror::IfTable> iftable,
7713 Handle<mirror::PointerArray> vtable,
Vladimir Marko51718132022-02-07 16:31:08 +00007714 bool is_klass_abstract,
7715 bool is_super_abstract,
Vladimir Marko19366b82022-01-18 10:41:28 +00007716 bool* out_new_conflict,
7717 ArtMethod** out_imt) {
Vladimir Marko19366b82022-01-18 10:41:28 +00007718 size_t ifcount = iftable->Count();
Vladimir Marko0441d202022-02-18 13:55:15 +00007719 // We do not need a read barrier here as the length is constant, both from-space and
7720 // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
7721 size_t super_ifcount =
7722 klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
Vladimir Marko921094a2017-01-12 18:37:06 +00007723
Vladimir Marko0441d202022-02-18 13:55:15 +00007724 ClassLinker* class_linker = nullptr;
Vladimir Marko51718132022-02-07 16:31:08 +00007725 ArtMethod* unimplemented_method = nullptr;
7726 ArtMethod* imt_conflict_method = nullptr;
7727 uintptr_t imt_methods_begin = 0u;
7728 size_t imt_methods_size = 0u;
7729 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
7730 DCHECK_EQ(klass->GetSuperClass()->ShouldHaveImt(), !is_super_abstract);
7731 if (!is_klass_abstract) {
Vladimir Marko0441d202022-02-18 13:55:15 +00007732 class_linker = class_linker_;
Vladimir Marko51718132022-02-07 16:31:08 +00007733 unimplemented_method = runtime_->GetImtUnimplementedMethod();
7734 imt_conflict_method = runtime_->GetImtConflictMethod();
7735 if (is_super_abstract) {
7736 // There was no IMT in superclass to copy to `out_imt[]`, so we need
7737 // to fill it with all implementation methods from superclass.
7738 DCHECK_EQ(imt_methods_begin, 0u);
7739 imt_methods_size = std::numeric_limits<size_t>::max(); // No method at the last byte.
7740 } else {
7741 // If the superclass has IMT, we have already copied it to `out_imt[]` and
7742 // we do not need to call `SetIMTRef()` for interfaces from superclass when
7743 // the implementation method is already in the superclass, only for new methods.
7744 // For simplicity, use the entire method array including direct methods.
7745 LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
7746 if (new_methods != nullptr) {
7747 DCHECK_NE(new_methods->size(), 0u);
7748 imt_methods_begin = reinterpret_cast<uintptr_t>(&new_methods->At(0));
7749 imt_methods_size = new_methods->size() * kMethodSize;
7750 }
7751 }
7752 }
Vladimir Marko921094a2017-01-12 18:37:06 +00007753
Vladimir Marko0441d202022-02-18 13:55:15 +00007754 auto update_imt = [=](ObjPtr<mirror::Class> iface, size_t j, ArtMethod* implementation)
7755 REQUIRES_SHARED(Locks::mutator_lock_) {
7756 // Place method in imt if entry is empty, place conflict otherwise.
7757 ArtMethod** imt_ptr = &out_imt[iface->GetVirtualMethod(j, kPointerSize)->GetImtIndex()];
7758 class_linker->SetIMTRef(unimplemented_method,
7759 imt_conflict_method,
7760 implementation,
7761 /*out*/out_new_conflict,
7762 /*out*/imt_ptr);
7763 };
7764
Vladimir Marko19366b82022-01-18 10:41:28 +00007765 // For interfaces inherited from superclass, the new method arrays are empty,
7766 // so use vtable indexes from implementation methods from the superclass method array.
7767 for (size_t i = 0; i != super_ifcount; ++i) {
7768 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
Vladimir Marko0441d202022-02-18 13:55:15 +00007769 DCHECK(method_array == klass->GetSuperClass()->GetIfTable()->GetMethodArrayOrNull(i));
Vladimir Marko19366b82022-01-18 10:41:28 +00007770 if (method_array == nullptr) {
7771 continue;
7772 }
7773 size_t num_methods = method_array->GetLength();
7774 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
Vladimir Marko0441d202022-02-18 13:55:15 +00007775 size_t j = 0;
7776 // First loop has method array shared with the super class.
7777 for (; j != num_methods; ++j) {
Vladimir Marko19366b82022-01-18 10:41:28 +00007778 ArtMethod* super_implementation =
Vladimir Marko0441d202022-02-18 13:55:15 +00007779 method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7780 size_t vtable_index = super_implementation->GetMethodIndex();
7781 ArtMethod* implementation =
7782 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
7783 // Check if we need to update IMT with this method, see above.
7784 if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
7785 update_imt(iface, j, implementation);
7786 }
7787 if (implementation != super_implementation) {
7788 // Copy-on-write and move to the next loop.
7789 Thread* self = self_;
7790 StackHandleScope<2u> hs(self);
7791 Handle<mirror::PointerArray> old_method_array = hs.NewHandle(method_array);
7792 HandleWrapperObjPtr<mirror::Class> h_iface = hs.NewHandleWrapper(&iface);
7793 if (ifcount == super_ifcount && iftable.Get() == klass->GetSuperClass()->GetIfTable()) {
7794 ObjPtr<mirror::IfTable> new_iftable = ObjPtr<mirror::IfTable>::DownCast(
7795 mirror::ObjectArray<mirror::Object>::CopyOf(
7796 iftable, self, ifcount * mirror::IfTable::kMax));
7797 if (new_iftable == nullptr) {
7798 return false;
7799 }
7800 iftable.Assign(new_iftable);
7801 }
7802 method_array = ObjPtr<mirror::PointerArray>::DownCast(
7803 mirror::Array::CopyOf(old_method_array, self, num_methods));
7804 if (method_array == nullptr) {
7805 return false;
7806 }
7807 iftable->SetMethodArray(i, method_array);
7808 method_array->SetElementPtrSize(j, implementation, kPointerSize);
7809 ++j;
7810 break;
7811 }
7812 }
7813 // Second loop (if non-empty) has method array different from the superclass.
7814 for (; j != num_methods; ++j) {
7815 ArtMethod* super_implementation =
7816 method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
Vladimir Marko19366b82022-01-18 10:41:28 +00007817 size_t vtable_index = super_implementation->GetMethodIndex();
7818 ArtMethod* implementation =
7819 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
7820 method_array->SetElementPtrSize(j, implementation, kPointerSize);
Vladimir Marko51718132022-02-07 16:31:08 +00007821 // Check if we need to update IMT with this method, see above.
7822 if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
Vladimir Marko0441d202022-02-18 13:55:15 +00007823 update_imt(iface, j, implementation);
Vladimir Marko51718132022-02-07 16:31:08 +00007824 }
Vladimir Marko921094a2017-01-12 18:37:06 +00007825 }
7826 }
Vladimir Marko921094a2017-01-12 18:37:06 +00007827
Vladimir Marko19366b82022-01-18 10:41:28 +00007828 // New interface method arrays contain vtable indexes. Translate them to methods.
Vladimir Marko51718132022-02-07 16:31:08 +00007829 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
Vladimir Marko19366b82022-01-18 10:41:28 +00007830 for (size_t i = super_ifcount; i != ifcount; ++i) {
7831 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
7832 if (method_array == nullptr) {
7833 continue;
Vladimir Marko921094a2017-01-12 18:37:06 +00007834 }
Vladimir Marko19366b82022-01-18 10:41:28 +00007835 size_t num_methods = method_array->GetLength();
7836 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
7837 for (size_t j = 0; j != num_methods; ++j) {
7838 size_t vtable_index = method_array->GetElementPtrSize<size_t, kPointerSize>(j);
7839 ArtMethod* implementation =
7840 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
7841 method_array->SetElementPtrSize(j, implementation, kPointerSize);
Vladimir Marko51718132022-02-07 16:31:08 +00007842 if (!is_klass_abstract) {
Vladimir Marko0441d202022-02-18 13:55:15 +00007843 update_imt(iface, j, implementation);
Vladimir Marko51718132022-02-07 16:31:08 +00007844 }
Vladimir Marko921094a2017-01-12 18:37:06 +00007845 }
7846 }
Vladimir Marko0441d202022-02-18 13:55:15 +00007847
7848 return true;
Vladimir Marko921094a2017-01-12 18:37:06 +00007849}
7850
Vladimir Marko19366b82022-01-18 10:41:28 +00007851NO_INLINE
7852static void ThrowIllegalAccessErrorForImplementingMethod(ObjPtr<mirror::Class> klass,
7853 ArtMethod* vtable_method,
7854 ArtMethod* interface_method)
7855 REQUIRES_SHARED(Locks::mutator_lock_) {
7856 DCHECK(!vtable_method->IsAbstract());
7857 DCHECK(!vtable_method->IsPublic());
7858 ThrowIllegalAccessError(
7859 klass,
7860 "Method '%s' implementing interface method '%s' is not public",
7861 vtable_method->PrettyMethod().c_str(),
7862 interface_method->PrettyMethod().c_str());
Vladimir Marko921094a2017-01-12 18:37:06 +00007863}
7864
Vladimir Markob91402f2021-12-21 15:55:06 +00007865template <PointerSize kPointerSize>
Vladimir Marko0441d202022-02-18 13:55:15 +00007866ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
7867 Thread* self, size_t length) {
7868 using PointerArrayType = std::conditional_t<
7869 kPointerSize == PointerSize::k64, mirror::LongArray, mirror::IntArray>;
7870 ObjPtr<mirror::Array> array = PointerArrayType::Alloc(self, length);
7871 return ObjPtr<mirror::PointerArray>::DownCast(array);
7872}
7873
7874template <PointerSize kPointerSize>
7875bool ClassLinker::LinkMethodsHelper<kPointerSize>::AllocateIfTableMethodArrays(
7876 Thread* self,
7877 Handle<mirror::Class> klass,
7878 Handle<mirror::IfTable> iftable) {
7879 DCHECK(!klass->IsInterface());
7880 DCHECK(klass_->HasSuperClass());
7881 const size_t ifcount = iftable->Count();
7882 // We do not need a read barrier here as the length is constant, both from-space and
7883 // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
7884 size_t super_ifcount =
7885 klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
7886 if (ifcount == super_ifcount) {
7887 DCHECK(iftable.Get() == klass_->GetSuperClass()->GetIfTable());
7888 return true;
7889 }
7890
7891 if (kIsDebugBuild) {
7892 // The method array references for superclass interfaces have been copied.
7893 // We shall allocate new arrays if needed (copy-on-write) in `FinalizeIfTable()`.
7894 ObjPtr<mirror::IfTable> super_iftable = klass_->GetSuperClass()->GetIfTable();
7895 for (size_t i = 0; i != super_ifcount; ++i) {
7896 CHECK(iftable->GetInterface(i) == super_iftable->GetInterface(i));
7897 CHECK(iftable->GetMethodArrayOrNull(i) == super_iftable->GetMethodArrayOrNull(i));
7898 }
7899 }
7900
7901 for (size_t i = super_ifcount; i < ifcount; ++i) {
7902 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
7903 if (num_methods > 0) {
7904 ObjPtr<mirror::PointerArray> method_array = AllocPointerArray(self, num_methods);
7905 if (UNLIKELY(method_array == nullptr)) {
7906 self->AssertPendingOOMException();
7907 return false;
7908 }
7909 iftable->SetMethodArray(i, method_array);
7910 }
7911 }
7912 return true;
7913}
7914
// Assigns vtable indexes for `klass`:
//   1. Declared virtual methods get either the index of the superclass method
//      they override or a newly appended index.
//   2. Interface methods from newly added interfaces get indexes that are
//      stored as raw values in the iftable method arrays, to be translated to
//      actual method pointers later.
//   3. Candidates for copied (miranda/default/conflict) methods are recorded
//      in `copied_method_records_` and counted in `num_new_copied_methods_`.
// Returns the required vtable length, or 0u on failure with a pending
// exception (LinkageError, IllegalAccessError or ClassFormatError).
template <PointerSize kPointerSize>
size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
    ObjPtr<mirror::Class> klass,
    ObjPtr<mirror::Class> super_class,
    bool is_super_abstract,
    size_t num_virtual_methods,
    ObjPtr<mirror::IfTable> iftable) {
  DCHECK(!klass->IsInterface());
  DCHECK(klass->HasSuperClass());
  DCHECK(klass->GetSuperClass() == super_class);

  // There should be no thread suspension unless we want to throw an exception.
  // (We are using `ObjPtr<>` and raw vtable pointers that are invalidated by thread suspension.)
  std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);

  // Prepare a hash table with virtual methods from the superclass.
  // For the unlikely cases that there are multiple methods with the same signature
  // but different vtable indexes, keep an array with indexes of the previous
  // methods with the same signature (walked as singly-linked lists).
  uint8_t* raw_super_vtable;
  size_t super_vtable_length;
  if (is_super_abstract) {
    // An abstract superclass has no embedded vtable; read from the allocated
    // `PointerArray` vtable instead.
    DCHECK(!super_class->ShouldHaveEmbeddedVTable());
    ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTableDuringLinking();
    DCHECK(super_vtable != nullptr);
    raw_super_vtable = reinterpret_cast<uint8_t*>(super_vtable.Ptr()) +
                       mirror::Array::DataOffset(static_cast<size_t>(kPointerSize)).Uint32Value();
    super_vtable_length = super_vtable->GetLength();
  } else {
    // Non-abstract superclass: the vtable is embedded in the class object.
    DCHECK(super_class->ShouldHaveEmbeddedVTable());
    raw_super_vtable = reinterpret_cast<uint8_t*>(super_class.Ptr()) +
                       mirror::Class::EmbeddedVTableOffset(kPointerSize).Uint32Value();
    super_vtable_length = super_class->GetEmbeddedVTableLength();
  }
  VTableAccessor super_vtable_accessor(raw_super_vtable, super_vtable_length);
  static constexpr double kMinLoadFactor = 0.3;
  static constexpr double kMaxLoadFactor = 0.5;
  static constexpr size_t kMaxStackBuferSize = 256;
  // Hash set buffers are sized at 3x the element count to respect the load
  // factor bounds above; small totals are placed on the stack via `alloca()`,
  // larger ones in the arena allocator.
  const size_t super_vtable_buffer_size = super_vtable_length * 3;
  const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
  const size_t total_buffer_size = super_vtable_buffer_size + declared_virtuals_buffer_size;
  uint32_t* super_vtable_buffer_ptr = (total_buffer_size <= kMaxStackBuferSize)
      ? reinterpret_cast<uint32_t*>(alloca(total_buffer_size * sizeof(uint32_t)))
      : allocator_.AllocArray<uint32_t>(total_buffer_size);
  uint32_t* declared_virtuals_buffer_ptr = super_vtable_buffer_ptr + super_vtable_buffer_size;
  VTableSignatureSet super_vtable_signatures(
      kMinLoadFactor,
      kMaxLoadFactor,
      VTableSignatureHash(super_vtable_accessor),
      VTableSignatureEqual(super_vtable_accessor),
      super_vtable_buffer_ptr,
      super_vtable_buffer_size,
      allocator_.Adapter());
  ArrayRef<uint32_t> same_signature_vtable_lists;
  // Insert the first `mirror::Object::kVTableLength` indexes with pre-calculated hashes.
  DCHECK_GE(super_vtable_length, mirror::Object::kVTableLength);
  for (uint32_t i = 0; i != mirror::Object::kVTableLength; ++i) {
    size_t hash = class_linker_->object_virtual_method_hashes_[i];
    // There are no duplicate signatures in `java.lang.Object`, so use `HashSet<>::PutWithHash()`.
    // This avoids equality comparison for the three `java.lang.Object.wait()` overloads.
    super_vtable_signatures.PutWithHash(i, hash);
  }
  // Insert the remaining indexes, check for duplicate signatures.
  if (super_vtable_length > mirror::Object::kVTableLength) {
    for (size_t i = mirror::Object::kVTableLength; i < super_vtable_length; ++i) {
      // Use `super_vtable_accessor` for getting the method for hash calculation.
      // Letting `HashSet<>::insert()` use the internal accessor copy in the hash
      // function prevents the compiler from optimizing this properly because the
      // compiler cannot prove that the accessor copy is immutable.
      size_t hash = ComputeMethodHash(super_vtable_accessor.GetVTableEntry(i));
      auto [it, inserted] = super_vtable_signatures.InsertWithHash(i, hash);
      if (UNLIKELY(!inserted)) {
        // Duplicate signature: lazily allocate the linked-list array and chain
        // this index in front of the previously recorded one.
        if (same_signature_vtable_lists.empty()) {
          same_signature_vtable_lists = ArrayRef<uint32_t>(
              allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
          std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
          same_signature_vtable_lists_ = same_signature_vtable_lists;
        }
        DCHECK_LT(*it, i);
        same_signature_vtable_lists[i] = *it;
        *it = i;
      }
    }
  }

  // For each declared virtual method, look for a superclass virtual method
  // to override and assign a new vtable index if no method was overridden.
  DeclaredVirtualSignatureSet declared_virtual_signatures(
      kMinLoadFactor,
      kMaxLoadFactor,
      DeclaredVirtualSignatureHash(klass),
      DeclaredVirtualSignatureEqual(klass),
      declared_virtuals_buffer_ptr,
      declared_virtuals_buffer_size,
      allocator_.Adapter());
  const bool is_proxy_class = klass->IsProxyClass();
  size_t vtable_length = super_vtable_length;
  for (size_t i = 0; i != num_virtual_methods; ++i) {
    ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
    DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
    // For proxy classes, match signatures against the interface method the
    // proxy method implements.
    ArtMethod* signature_method = UNLIKELY(is_proxy_class)
        ? virtual_method->GetInterfaceMethodForProxyUnchecked(kPointerSize)
        : virtual_method;
    size_t hash = ComputeMethodHash(signature_method);
    declared_virtual_signatures.PutWithHash(i, hash);
    auto it = super_vtable_signatures.FindWithHash(signature_method, hash);
    if (it != super_vtable_signatures.end()) {
      size_t super_index = *it;
      DCHECK_LT(super_index, super_vtable_length);
      ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(super_index);
      // Historical note: Before Android 4.1, an inaccessible package-private
      // superclass method would have been incorrectly overridden.
      bool overrides = klass->CanAccessMember(super_method->GetDeclaringClass(),
                                              super_method->GetAccessFlags());
      if (overrides && super_method->IsFinal()) {
        // Overriding a final method is a linkage error; release the
        // no-suspension assertion before throwing.
        sants.reset();
        ThrowLinkageError(klass, "Method %s overrides final method in class %s",
                          virtual_method->PrettyMethod().c_str(),
                          super_method->GetDeclaringClassDescriptor());
        return 0u;
      }
      if (UNLIKELY(!same_signature_vtable_lists.empty())) {
        // We may override more than one method according to JLS, see b/211854716 .
        // We record the highest overridden vtable index here so that we can walk
        // the list to find other overridden methods when constructing the vtable.
        // However, we walk all the methods to check for final method overriding.
        size_t current_index = super_index;
        while (same_signature_vtable_lists[current_index] != dex::kDexNoIndex) {
          DCHECK_LT(same_signature_vtable_lists[current_index], current_index);
          current_index = same_signature_vtable_lists[current_index];
          ArtMethod* current_method = super_vtable_accessor.GetVTableEntry(current_index);
          if (klass->CanAccessMember(current_method->GetDeclaringClass(),
                                     current_method->GetAccessFlags())) {
            if (current_method->IsFinal()) {
              sants.reset();
              ThrowLinkageError(klass, "Method %s overrides final method in class %s",
                                virtual_method->PrettyMethod().c_str(),
                                current_method->GetDeclaringClassDescriptor());
              return 0u;
            }
            if (!overrides) {
              overrides = true;
              super_index = current_index;
              super_method = current_method;
            }
          }
        }
      }
      if (overrides) {
        virtual_method->SetMethodIndex(super_index);
        continue;
      }
    }
    // The method does not override any method from superclass, so it needs a new vtable index.
    virtual_method->SetMethodIndex(vtable_length);
    ++vtable_length;
  }

  // Assign vtable indexes for interface methods in new interfaces and store them
  // in implementation method arrays. These shall be replaced by actual method
  // pointers later. We do not need to do this for superclass interfaces as we can
  // get these vtable indexes from implementation methods in superclass iftable.
  // Record data for copied methods which shall be referenced by the vtable.
  const size_t ifcount = iftable->Count();
  ObjPtr<mirror::IfTable> super_iftable = super_class->GetIfTable();
  const size_t super_ifcount = super_iftable->Count();
  // Walk new interfaces in reverse order (most-derived interfaces first).
  for (size_t i = ifcount; i != super_ifcount; ) {
    --i;
    DCHECK_LT(i, ifcount);
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
    for (size_t j = 0; j != num_methods; ++j) {
      ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
      size_t hash = ComputeMethodHash(interface_method);
      ArtMethod* vtable_method = nullptr;
      bool found = false;
      // Prefer a method declared by `klass` itself; otherwise fall back to a
      // superclass vtable method with the same signature.
      auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
      if (it1 != declared_virtual_signatures.end()) {
        vtable_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
        found = true;
      } else {
        auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
        if (it2 != super_vtable_signatures.end()) {
          // FIXME: If there are multiple vtable methods with the same signature, the one
          // with the highest vtable index is not necessarily the one in most-derived class.
          // However, we're preserving old behavior for now. b/211854716
          vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
          found = true;
        }
      }
      uint32_t vtable_index = vtable_length;
      if (found) {
        DCHECK(vtable_method != nullptr);
        if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
          // A concrete implementing method must be public.
          sants.reset();
          ThrowIllegalAccessErrorForImplementingMethod(klass, vtable_method, interface_method);
          return 0u;
        }
        vtable_index = vtable_method->GetMethodIndexDuringLinking();
        if (!vtable_method->IsOverridableByDefaultMethod()) {
          // Store the vtable index (not a method pointer yet); translated later.
          method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
          continue;
        }
      }

      // Record (or update) a copied-method candidate for this signature.
      auto [it, inserted] = copied_method_records_.InsertWithHash(
          CopiedMethodRecord(interface_method, vtable_index), hash);
      if (found) {
        DCHECK_EQ(vtable_index, it->GetMethodIndex());
      } else if (inserted) {
        // First time we see this signature without an implementation:
        // reserve a new vtable slot for the copied method.
        DCHECK_EQ(vtable_index, it->GetMethodIndex());
        DCHECK_EQ(vtable_index, vtable_length);
        ++vtable_length;
      } else {
        // Reuse the vtable slot already reserved for this signature.
        vtable_index = it->GetMethodIndex();
      }
      method_array->SetElementPtrSize(j, it->GetMethodIndex(), kPointerSize);
      if (inserted) {
        it->SetState(interface_method->IsAbstract() ? CopiedMethodRecord::State::kAbstractSingle
                                                    : CopiedMethodRecord::State::kDefaultSingle);
      } else {
        it->UpdateState(iface, interface_method, vtable_index, iftable, ifcount, i);
      }
    }
  }
  // Finalize copied method records and check if we can reuse some methods from superclass vtable.
  size_t num_new_copied_methods = copied_method_records_.size();
  for (CopiedMethodRecord& record : copied_method_records_) {
    uint32_t vtable_index = record.GetMethodIndex();
    if (vtable_index < super_vtable_length) {
      ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(record.GetMethodIndex());
      DCHECK(super_method->IsOverridableByDefaultMethod());
      record.FinalizeState(
          super_method, vtable_index, iftable, ifcount, super_iftable, super_ifcount);
      if (record.GetState() == CopiedMethodRecord::State::kUseSuperMethod) {
        // The superclass method can be reused; no new copied method is needed.
        --num_new_copied_methods;
      }
    }
  }
  num_new_copied_methods_ = num_new_copied_methods;

  // Method indexes are stored in 16 bits (see `ArtMethod::SetMethodIndex`
  // usage above); reject classes whose vtable would not fit.
  if (UNLIKELY(!IsUint<16>(vtable_length))) {
    sants.reset();
    ThrowClassFormatError(klass, "Too many methods defined on class: %zd", vtable_length);
    return 0u;
  }

  return vtable_length;
}
8165
// For an interface `klass`, finds default methods inherited from
// superinterfaces that are in conflict (multiple non-masked defaults with the
// same signature) and records them in `copied_method_records_`; records with a
// unique default are pruned to `kUseSuperMethod`. Sets `num_new_copied_methods_`
// to the number of conflict methods that must be copied into `klass`.
// Returns false with a pending IllegalAccessError if a concrete, non-public
// declared method would implement an inherited default method.
template <PointerSize kPointerSize>
bool ClassLinker::LinkMethodsHelper<kPointerSize>::FindCopiedMethodsForInterface(
    ObjPtr<mirror::Class> klass,
    size_t num_virtual_methods,
    ObjPtr<mirror::IfTable> iftable) {
  DCHECK(klass->IsInterface());
  DCHECK(klass->HasSuperClass());
  DCHECK(klass->GetSuperClass()->IsObjectClass());
  DCHECK_EQ(klass->GetSuperClass()->GetIfTableCount(), 0);

  // There should be no thread suspension unless we want to throw an exception.
  // (We are using `ObjPtr<>`s that are invalidated by thread suspension.)
  std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);

  // Prepare a `HashSet<>` with the declared virtual methods. These mask any methods
  // from superinterfaces, so we can filter out matching superinterface methods.
  static constexpr double kMinLoadFactor = 0.3;
  static constexpr double kMaxLoadFactor = 0.5;
  static constexpr size_t kMaxStackBuferSize = 256;
  // Buffer sized at 3x the element count to respect the load factor bounds;
  // small buffers go on the stack, larger ones in the arena allocator.
  const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
  uint32_t* declared_virtuals_buffer_ptr = (declared_virtuals_buffer_size <= kMaxStackBuferSize)
      ? reinterpret_cast<uint32_t*>(alloca(declared_virtuals_buffer_size * sizeof(uint32_t)))
      : allocator_.AllocArray<uint32_t>(declared_virtuals_buffer_size);
  DeclaredVirtualSignatureSet declared_virtual_signatures(
      kMinLoadFactor,
      kMaxLoadFactor,
      DeclaredVirtualSignatureHash(klass),
      DeclaredVirtualSignatureEqual(klass),
      declared_virtuals_buffer_ptr,
      declared_virtuals_buffer_size,
      allocator_.Adapter());
  for (size_t i = 0; i != num_virtual_methods; ++i) {
    ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
    DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
    size_t hash = ComputeMethodHash(virtual_method);
    declared_virtual_signatures.PutWithHash(i, hash);
  }

  // We do not create miranda methods for interface classes, so we do not need to track
  // non-default (abstract) interface methods. The downside is that we cannot use the
  // optimized code paths with `CopiedMethodRecord::State::kDefaultSingle` and since
  // we do not fill method arrays for interfaces, the method search actually has to
  // compare signatures instead of searching for the implementing method.
  const size_t ifcount = iftable->Count();
  size_t new_method_index = num_virtual_methods;
  // Walk superinterfaces in reverse order (most-derived interfaces first).
  for (size_t i = ifcount; i != 0u; ) {
    --i;
    DCHECK_LT(i, ifcount);
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    if (!iface->HasDefaultMethods()) {
      continue;  // No default methods to process.
    }
    size_t num_methods = iface->NumDeclaredVirtualMethods();
    for (size_t j = 0; j != num_methods; ++j) {
      ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
      if (!interface_method->IsDefault()) {
        continue;  // Do not process this non-default method.
      }
      size_t hash = ComputeMethodHash(interface_method);
      auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
      if (it1 != declared_virtual_signatures.end()) {
        ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
        if (!virtual_method->IsAbstract() && !virtual_method->IsPublic()) {
          // A concrete method masking a default method must be public.
          sants.reset();
          ThrowIllegalAccessErrorForImplementingMethod(klass, virtual_method, interface_method);
          return false;
        }
        continue;  // This default method is masked by a method declared in this interface.
      }

      CopiedMethodRecord new_record(interface_method, new_method_index);
      auto it = copied_method_records_.FindWithHash(new_record, hash);
      if (it == copied_method_records_.end()) {
        // Pretend that there is another default method and try to update the state.
        // If the `interface_method` is not masked, the state shall change to
        // `kDefaultConflict`; if it is masked, the state remains `kDefault`.
        new_record.SetState(CopiedMethodRecord::State::kDefault);
        new_record.UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
        if (new_record.GetState() == CopiedMethodRecord::State::kDefaultConflict) {
          // Insert the new record with the state `kDefault`.
          new_record.SetState(CopiedMethodRecord::State::kDefault);
          copied_method_records_.PutWithHash(new_record, hash);
          DCHECK_EQ(new_method_index, new_record.GetMethodIndex());
          ++new_method_index;
        }
      } else {
        // Known signature: update the existing record's conflict state.
        it->UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
      }
    }
  }

  // Prune records without conflict. (Method indexes are updated in `ReallocMethods()`.)
  // We do not copy normal default methods to subinterfaces, instead we find the
  // default method with `Class::FindVirtualMethodForInterfaceSuper()` when needed.
  size_t num_new_copied_methods = copied_method_records_.size();
  for (CopiedMethodRecord& record : copied_method_records_) {
    if (record.GetState() != CopiedMethodRecord::State::kDefaultConflict) {
      DCHECK(record.GetState() == CopiedMethodRecord::State::kDefault);
      record.SetState(CopiedMethodRecord::State::kUseSuperMethod);
      --num_new_copied_methods;
    }
  }
  num_new_copied_methods_ = num_new_copied_methods;

  return true;
}
8272
8273
8274template <PointerSize kPointerSize>
Vladimir Markob91402f2021-12-21 15:55:06 +00008275FLATTEN
Vladimir Marko78f62d82022-01-10 16:25:19 +00008276bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
Vladimir Markobc893672021-11-10 15:25:46 +00008277 Thread* self,
Vladimir Marko78f62d82022-01-10 16:25:19 +00008278 Handle<mirror::Class> klass,
8279 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
8280 bool* out_new_conflict,
8281 ArtMethod** out_imt) {
Vladimir Markobc893672021-11-10 15:25:46 +00008282 const size_t num_virtual_methods = klass->NumVirtualMethods();
8283 if (klass->IsInterface()) {
8284 // No vtable.
8285 if (!IsUint<16>(num_virtual_methods)) {
8286 ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
8287 return false;
8288 }
8289 bool has_defaults = false;
8290 // Assign each method an IMT index and set the default flag.
8291 for (size_t i = 0; i < num_virtual_methods; ++i) {
Vladimir Markob91402f2021-12-21 15:55:06 +00008292 ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
Vladimir Markobc893672021-11-10 15:25:46 +00008293 m->SetMethodIndex(i);
8294 if (!m->IsAbstract()) {
8295 // If the dex file does not support default methods, throw ClassFormatError.
8296 // This check is necessary to protect from odd cases, such as native default
8297 // methods, that the dex file verifier permits for old dex file versions. b/157170505
8298 // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
8299 // currently running CTS tests for default methods with dex file version 035 which
8300 // does not support default methods. So, we limit this to native methods. b/157718952
8301 if (m->IsNative()) {
8302 DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
8303 ThrowClassFormatError(klass.Get(),
8304 "Dex file does not support default method '%s'",
8305 m->PrettyMethod().c_str());
8306 return false;
8307 }
Vladimir Markofd0aec42022-01-27 14:27:24 +00008308 if (!m->IsPublic()) {
8309 // The verifier should have caught the non-public method for dex version 37.
8310 // Just warn and skip it since this is from before default-methods so we don't
8311 // really need to care that it has code.
8312 LOG(WARNING) << "Default interface method " << m->PrettyMethod() << " is not public! "
8313 << "This will be a fatal error in subsequent versions of android. "
8314 << "Continuing anyway.";
8315 }
Vladimir Markobc893672021-11-10 15:25:46 +00008316 m->SetAccessFlags(m->GetAccessFlags() | kAccDefault);
8317 has_defaults = true;
8318 }
8319 }
8320 // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
8321 // during initialization. This is a performance optimization. We could simply traverse the
8322 // virtual_methods_ array again during initialization.
8323 if (has_defaults) {
8324 klass->SetHasDefaultMethods();
8325 }
Vladimir Marko78f62d82022-01-10 16:25:19 +00008326 ObjPtr<mirror::IfTable> iftable = SetupInterfaceLookupTable(
8327 self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass));
8328 if (UNLIKELY(iftable == nullptr)) {
8329 self->AssertPendingException();
8330 return false;
8331 }
Vladimir Marko19366b82022-01-18 10:41:28 +00008332 size_t ifcount = iftable->Count();
8333 bool have_super_with_defaults = false;
8334 for (size_t i = 0; i != ifcount; ++i) {
8335 if (iftable->GetInterface(i)->HasDefaultMethods()) {
8336 have_super_with_defaults = true;
8337 break;
8338 }
8339 }
8340 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8341 if (have_super_with_defaults) {
8342 if (!FindCopiedMethodsForInterface(klass.Get(), num_virtual_methods, iftable)) {
8343 self->AssertPendingException();
8344 return false;
8345 }
8346 if (num_new_copied_methods_ != 0u) {
8347 // Re-check the number of methods.
8348 size_t final_num_virtual_methods = num_virtual_methods + num_new_copied_methods_;
8349 if (!IsUint<16>(final_num_virtual_methods)) {
8350 ThrowClassFormatError(
8351 klass.Get(), "Too many methods on interface: %zu", final_num_virtual_methods);
8352 return false;
8353 }
8354 ReallocMethods(klass.Get());
8355 }
8356 }
Vladimir Marko78f62d82022-01-10 16:25:19 +00008357 klass->SetIfTable(iftable);
Vladimir Marko19366b82022-01-18 10:41:28 +00008358 if (kIsDebugBuild) {
8359 // May cause thread suspension, so do this after we're done with `ObjPtr<> iftable`.
8360 ClobberOldMethods(old_methods, klass->GetMethodsPtr());
8361 }
8362 return true;
Vladimir Markob91402f2021-12-21 15:55:06 +00008363 } else if (LIKELY(klass->HasSuperClass())) {
Vladimir Marko78f62d82022-01-10 16:25:19 +00008364 // We set up the interface lookup table now because we need it to determine if we need
8365 // to update any vtable entries with new default method implementations.
Vladimir Markobc893672021-11-10 15:25:46 +00008366 StackHandleScope<3> hs(self);
Vladimir Marko0441d202022-02-18 13:55:15 +00008367 MutableHandle<mirror::IfTable> iftable = hs.NewHandle(UNLIKELY(klass->IsProxyClass())
Vladimir Marko78f62d82022-01-10 16:25:19 +00008368 ? SetupInterfaceLookupTable(self, klass, &allocator_, ProxyInterfacesAccessor(interfaces))
8369 : SetupInterfaceLookupTable(
8370 self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass)));
8371 if (UNLIKELY(iftable == nullptr)) {
8372 self->AssertPendingException();
8373 return false;
8374 }
Vladimir Marko78f62d82022-01-10 16:25:19 +00008375
Vladimir Marko51718132022-02-07 16:31:08 +00008376 // Copy the IMT from superclass if present and needed. Update with new methods later.
Vladimir Marko0441d202022-02-18 13:55:15 +00008377 Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
Vladimir Marko51718132022-02-07 16:31:08 +00008378 bool is_klass_abstract = klass->IsAbstract();
8379 bool is_super_abstract = super_class->IsAbstract();
8380 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
8381 DCHECK_EQ(super_class->ShouldHaveImt(), !is_super_abstract);
8382 if (!is_klass_abstract && !is_super_abstract) {
8383 ImTable* super_imt = super_class->GetImt(kPointerSize);
8384 for (size_t i = 0; i < ImTable::kSize; ++i) {
8385 out_imt[i] = super_imt->Get(i, kPointerSize);
8386 }
8387 }
8388
8389 // If there are no new virtual methods and no new interfaces, we can simply reuse
8390 // the vtable from superclass. We may need to make a copy if it's embedded.
Vladimir Marko0441d202022-02-18 13:55:15 +00008391 const size_t super_vtable_length = super_class->GetVTableLength();
Vladimir Marko0f71b192022-02-02 17:20:12 +00008392 if (num_virtual_methods == 0 && iftable.Get() == super_class->GetIfTable()) {
Vladimir Marko51718132022-02-07 16:31:08 +00008393 DCHECK_EQ(is_super_abstract, !super_class->ShouldHaveEmbeddedVTable());
8394 if (is_super_abstract) {
8395 DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
8396 ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
8397 CHECK(super_vtable != nullptr) << super_class->PrettyClass();
8398 klass->SetVTable(super_vtable);
8399 // No IMT in the super class, we need to reconstruct it from the iftable.
8400 if (!is_klass_abstract && iftable->Count() != 0) {
8401 class_linker_->FillIMTFromIfTable(iftable.Get(),
8402 runtime_->GetImtUnimplementedMethod(),
8403 runtime_->GetImtConflictMethod(),
8404 klass.Get(),
8405 /*create_conflict_tables=*/false,
8406 /*ignore_copied_methods=*/false,
8407 out_new_conflict,
8408 out_imt);
8409 }
8410 } else {
Vladimir Marko0441d202022-02-18 13:55:15 +00008411 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, super_vtable_length);
Vladimir Marko8670e042021-12-21 17:55:48 +00008412 if (UNLIKELY(vtable == nullptr)) {
8413 self->AssertPendingOOMException();
Vladimir Markobc893672021-11-10 15:25:46 +00008414 return false;
8415 }
Vladimir Marko8670e042021-12-21 17:55:48 +00008416 for (size_t i = 0; i < super_vtable_length; i++) {
8417 vtable->SetElementPtrSize(
8418 i, super_class->GetEmbeddedVTableEntry(i, kPointerSize), kPointerSize);
8419 }
8420 klass->SetVTable(vtable);
Vladimir Marko51718132022-02-07 16:31:08 +00008421 // The IMT was already copied from superclass if `klass` is not abstract.
Vladimir Marko8670e042021-12-21 17:55:48 +00008422 }
Vladimir Markobed84ef2022-01-21 13:57:14 +00008423 klass->SetIfTable(iftable.Get());
Vladimir Marko8670e042021-12-21 17:55:48 +00008424 return true;
8425 }
8426
Vladimir Markobed84ef2022-01-21 13:57:14 +00008427 // Allocate method arrays, so that we can link interface methods without thread suspension,
8428 // otherwise GC could miss visiting newly allocated copied methods.
8429 // TODO: Do not allocate copied methods during linking, store only records about what
8430 // we need to allocate and allocate it at the end. Start with superclass iftable and
8431 // perform copy-on-write when needed to facilitate maximum memory sharing.
Vladimir Marko0441d202022-02-18 13:55:15 +00008432 if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
8433 self->AssertPendingOOMException();
Vladimir Markobed84ef2022-01-21 13:57:14 +00008434 return false;
8435 }
8436
Vladimir Marko51718132022-02-07 16:31:08 +00008437 size_t final_vtable_size = AssignVTableIndexes(
8438 klass.Get(), super_class.Get(), is_super_abstract, num_virtual_methods, iftable.Get());
Vladimir Marko8670e042021-12-21 17:55:48 +00008439 if (final_vtable_size == 0u) {
8440 self->AssertPendingException();
8441 return false;
8442 }
8443 DCHECK(IsUint<16>(final_vtable_size));
8444
8445 // Allocate the new vtable.
Vladimir Marko0441d202022-02-18 13:55:15 +00008446 Handle<mirror::PointerArray> vtable = hs.NewHandle(AllocPointerArray(self, final_vtable_size));
Vladimir Marko8670e042021-12-21 17:55:48 +00008447 if (UNLIKELY(vtable == nullptr)) {
8448 self->AssertPendingOOMException();
8449 return false;
8450 }
8451
Vladimir Marko19366b82022-01-18 10:41:28 +00008452 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8453 if (num_new_copied_methods_ != 0u) {
8454 ReallocMethods(klass.Get());
8455 }
8456
Vladimir Marko8670e042021-12-21 17:55:48 +00008457 // Store new virtual methods in the new vtable.
Vladimir Markod5d11d92021-02-02 16:24:25 +00008458 ArrayRef<uint32_t> same_signature_vtable_lists = same_signature_vtable_lists_;
Vladimir Marko8670e042021-12-21 17:55:48 +00008459 for (ArtMethod& virtual_method : klass->GetVirtualMethodsSliceUnchecked(kPointerSize)) {
Vladimir Markod5d11d92021-02-02 16:24:25 +00008460 uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
Vladimir Marko8670e042021-12-21 17:55:48 +00008461 vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
Vladimir Markod5d11d92021-02-02 16:24:25 +00008462 if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
8463 // We may override more than one method according to JLS, see b/211854716 .
8464 // If we do, arbitrarily update the method index to the lowest overridden vtable index.
8465 while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
8466 DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
8467 vtable_index = same_signature_vtable_lists[vtable_index];
8468 ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
8469 if (klass->CanAccessMember(current_method->GetDeclaringClass(),
8470 current_method->GetAccessFlags())) {
8471 DCHECK(!current_method->IsFinal());
8472 vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
8473 virtual_method.SetMethodIndex(vtable_index);
8474 }
8475 }
8476 }
Vladimir Marko8670e042021-12-21 17:55:48 +00008477 }
8478
8479 // For non-overridden vtable slots, copy a method from `super_class`.
8480 for (size_t j = 0; j != super_vtable_length; ++j) {
Vladimir Marko19366b82022-01-18 10:41:28 +00008481 if (vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j) == nullptr) {
8482 ArtMethod* super_method = super_class->GetVTableEntry(j, kPointerSize);
8483 vtable->SetElementPtrSize(j, super_method, kPointerSize);
Vladimir Markobc893672021-11-10 15:25:46 +00008484 }
8485 }
Vladimir Marko8670e042021-12-21 17:55:48 +00008486
Vladimir Marko51718132022-02-07 16:31:08 +00008487 // Update the `iftable` (and IMT) with finalized virtual methods.
Vladimir Marko0441d202022-02-18 13:55:15 +00008488 if (!FinalizeIfTable(klass,
8489 iftable,
8490 vtable,
8491 is_klass_abstract,
8492 is_super_abstract,
8493 out_new_conflict,
8494 out_imt)) {
8495 self->AssertPendingOOMException();
8496 return false;
8497 }
Vladimir Marko19366b82022-01-18 10:41:28 +00008498
Vladimir Markobc893672021-11-10 15:25:46 +00008499 klass->SetVTable(vtable.Get());
Vladimir Marko19366b82022-01-18 10:41:28 +00008500 klass->SetIfTable(iftable.Get());
8501 if (kIsDebugBuild) {
8502 CheckVTable(self, klass, kPointerSize);
8503 ClobberOldMethods(old_methods, klass->GetMethodsPtr());
8504 }
8505 return true;
Vladimir Markobc893672021-11-10 15:25:46 +00008506 } else {
Vladimir Marko78f62d82022-01-10 16:25:19 +00008507 return LinkJavaLangObjectMethods(self, klass);
Vladimir Markobc893672021-11-10 15:25:46 +00008508 }
Vladimir Markobc893672021-11-10 15:25:46 +00008509}
8510
Vladimir Markob91402f2021-12-21 15:55:06 +00008511template <PointerSize kPointerSize>
Vladimir Marko78f62d82022-01-10 16:25:19 +00008512bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkJavaLangObjectMethods(
Vladimir Markob91402f2021-12-21 15:55:06 +00008513 Thread* self,
8514 Handle<mirror::Class> klass) {
8515 DCHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(class_linker_));
8516 DCHECK_EQ(klass->NumVirtualMethods(), mirror::Object::kVTableLength);
8517 static_assert(IsUint<16>(mirror::Object::kVTableLength));
Vladimir Marko0441d202022-02-18 13:55:15 +00008518 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, mirror::Object::kVTableLength);
Vladimir Markob91402f2021-12-21 15:55:06 +00008519 if (UNLIKELY(vtable == nullptr)) {
8520 self->AssertPendingOOMException();
8521 return false;
8522 }
8523 for (size_t i = 0; i < mirror::Object::kVTableLength; ++i) {
8524 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8525 vtable->SetElementPtrSize(i, virtual_method, kPointerSize);
8526 virtual_method->SetMethodIndex(i);
8527 }
8528 klass->SetVTable(vtable);
8529 InitializeObjectVirtualMethodHashes(
8530 klass.Get(),
8531 kPointerSize,
8532 ArrayRef<uint32_t>(class_linker_->object_virtual_method_hashes_));
Vladimir Marko78f62d82022-01-10 16:25:19 +00008533 // The interface table is already allocated but there are no interface methods to link.
8534 DCHECK(klass->GetIfTable() != nullptr);
8535 DCHECK_EQ(klass->GetIfTableCount(), 0);
Vladimir Markob91402f2021-12-21 15:55:06 +00008536 return true;
8537}
8538
Vladimir Markobc893672021-11-10 15:25:46 +00008539// Populate the class vtable and itable. Compute return type indices.
8540bool ClassLinker::LinkMethods(Thread* self,
8541 Handle<mirror::Class> klass,
8542 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
8543 bool* out_new_conflict,
8544 ArtMethod** out_imt) {
8545 self->AllowThreadSuspension();
Vladimir Markobc893672021-11-10 15:25:46 +00008546 // Link virtual methods then interface methods.
8547 Runtime* const runtime = Runtime::Current();
Vladimir Markob91402f2021-12-21 15:55:06 +00008548 if (LIKELY(GetImagePointerSize() == kRuntimePointerSize)) {
8549 LinkMethodsHelper<kRuntimePointerSize> helper(this, klass, self, runtime);
Vladimir Marko78f62d82022-01-10 16:25:19 +00008550 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
Vladimir Markob91402f2021-12-21 15:55:06 +00008551 } else {
8552 constexpr PointerSize kOtherPointerSize =
8553 (kRuntimePointerSize == PointerSize::k64) ? PointerSize::k32 : PointerSize::k64;
8554 LinkMethodsHelper<kOtherPointerSize> helper(this, klass, self, runtime);
Vladimir Marko78f62d82022-01-10 16:25:19 +00008555 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
Vladimir Markob91402f2021-12-21 15:55:06 +00008556 }
Vladimir Markobc893672021-11-10 15:25:46 +00008557}
8558
// Helper encapsulating the field layout algorithm: it assigns offsets to all
// static or instance fields of a class, packing primitives into alignment gaps.
class ClassLinker::LinkFieldsHelper {
 public:
  // Assign offsets to the static (`is_static` == true) or instance fields of `klass`.
  // For static fields, writes the resulting class size to `*class_size`.
  // Returns false on failure.
  static bool LinkFields(ClassLinker* class_linker,
                         Thread* self,
                         Handle<mirror::Class> klass,
                         bool is_static,
                         size_t* class_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Sort key for field layout; defined below. Lower values are laid out first.
  enum class FieldTypeOrder : uint16_t;
  // Tracker for 1/2/4-byte alignment gaps that smaller fields can fill; defined below.
  class FieldGaps;

  // Pairs a field's layout order with its index into the class's field array,
  // so fields can be sorted without touching the ArtField objects themselves.
  struct FieldTypeOrderAndIndex {
    FieldTypeOrder field_type_order;
    uint16_t field_index;
  };

  // Map the first character of a field's type descriptor to its layout order.
  static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);

  // Record `field_offset` in `field` (which must hold a `kSize`-byte value)
  // and return the offset immediately past it.
  template <size_t kSize>
  static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
Brian Carlstrom4873d462011-08-21 15:23:39 -07008583
// We use the following order of field types for assigning offsets.
// Some fields can be shuffled forward to fill gaps, see `ClassLinker::LinkFields()`.
// References come first, then primitives by decreasing size (64-bit, 32-bit,
// 16-bit, 8-bit); ties within a size class keep a fixed type order. The numeric
// values are the sort keys, so the enumerator order here is load-bearing.
enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
  kReference = 0u,  // Object references are laid out first.
  kLong,            // 64-bit types.
  kDouble,
  kInt,             // 32-bit types.
  kFloat,
  kChar,            // 16-bit types.
  kShort,
  kBoolean,         // 8-bit types.
  kByte,

  // Aliases marking the last member of each size class; used as range
  // boundaries when walking the sorted field array.
  kLast64BitType = kDouble,
  kLast32BitType = kFloat,
  kLast16BitType = kShort,
};
8601
Vladimir Markoc7993d52021-01-27 15:20:56 +00008602ALWAYS_INLINE
Vladimir Marko42bee502021-01-28 14:58:35 +00008603ClassLinker::LinkFieldsHelper::FieldTypeOrder
8604ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008605 switch (first_char) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008606 case 'J':
8607 return FieldTypeOrder::kLong;
8608 case 'D':
8609 return FieldTypeOrder::kDouble;
8610 case 'I':
8611 return FieldTypeOrder::kInt;
8612 case 'F':
8613 return FieldTypeOrder::kFloat;
8614 case 'C':
8615 return FieldTypeOrder::kChar;
8616 case 'S':
8617 return FieldTypeOrder::kShort;
8618 case 'Z':
8619 return FieldTypeOrder::kBoolean;
8620 case 'B':
8621 return FieldTypeOrder::kByte;
Vladimir Marko42bee502021-01-28 14:58:35 +00008622 default:
8623 DCHECK(first_char == 'L' || first_char == '[') << first_char;
8624 return FieldTypeOrder::kReference;
Vladimir Markoc7993d52021-01-27 15:20:56 +00008625 }
8626}
8627
// Gaps where we can insert fields in object layout.
// Tracks at most one gap of each size (1, 2 and 4 bytes) created by alignment
// padding, so that later, smaller fields can be placed into them. The
// invariant maintained throughout is that there is never more than one free
// gap per size, which is why single `uint32_t` slots suffice.
class ClassLinker::LinkFieldsHelper::FieldGaps {
 public:
  // Round `field_offset` up to `kSize` alignment, recording any bytes skipped
  // over as gaps for later reuse. Returns the aligned offset.
  template <uint32_t kSize>
  ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
    static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
    if (!IsAligned<kSize>(field_offset.Uint32Value())) {
      uint32_t gap_start = field_offset.Uint32Value();
      field_offset = MemberOffset(RoundUp(gap_start, kSize));
      // The skipped range can contain at most gaps smaller than `kSize`,
      // hence the `kSize - 1u` bitmask of gap sizes to check.
      AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
    }
    return field_offset;
  }

  // Whether a gap is available that can hold a `kSize`-byte field.
  // A larger recorded gap also qualifies (it gets split on release).
  template <uint32_t kSize>
  bool HasGap() const {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    return (kSize == 1u && gap1_offset_ != kNoOffset) ||
           (kSize <= 2u && gap2_offset_ != kNoOffset) ||
           gap4_offset_ != kNoOffset;
  }

  // Take the smallest suitable gap for a `kSize`-byte field and return its
  // offset. If a larger gap is consumed, the unused remainder is re-recorded
  // as smaller gap(s). Must only be called when `HasGap<kSize>()` is true.
  template <uint32_t kSize>
  MemberOffset ReleaseGap() {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    uint32_t result;
    if (kSize == 1u && gap1_offset_ != kNoOffset) {
      // Gaps are created in increasing offset order; smaller gaps come first.
      DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
      result = gap1_offset_;
      gap1_offset_ = kNoOffset;
    } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
      result = gap2_offset_;
      gap2_offset_ = kNoOffset;
      if (kSize < 2u) {
        // A 1-byte field consumed a 2-byte gap; the second byte becomes a new 1-byte gap.
        AddGaps<1u>(result + kSize, result + 2u);
      }
    } else {
      DCHECK_NE(gap4_offset_, kNoOffset);
      result = gap4_offset_;
      gap4_offset_ = kNoOffset;
      if (kSize < 4u) {
        // Re-record the tail of the 4-byte gap: for kSize == 1u check gaps of
        // sizes 1 and 2 (mask 3u), for kSize == 2u just size 2 (mask 2u).
        AddGaps<kSize | 2u>(result + kSize, result + 4u);
      }
    }
    return MemberOffset(result);
  }

 private:
  // Record the range [gap_start, gap_end) as gaps. `kGapsToCheck` is a bitmask
  // of gap sizes (1u | 2u | 4u) that may occur in the range; each size can
  // appear at most once because the range comes from a single alignment step.
  template <uint32_t kGapsToCheck>
  void AddGaps(uint32_t gap_start, uint32_t gap_end) {
    if ((kGapsToCheck & 1u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_end, 2u);
      if ((gap_start & 1u) != 0u) {
        // Odd start: peel off a 1-byte gap to reach 2-byte alignment.
        DCHECK_EQ(gap1_offset_, kNoOffset);
        gap1_offset_ = gap_start;
        gap_start += 1u;
        if (kGapsToCheck == 1u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 2u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 2u);
      DCHECK_ALIGNED(gap_end, 4u);
      if ((gap_start & 2u) != 0u) {
        // 2-byte-aligned but not 4-byte-aligned start: peel off a 2-byte gap.
        DCHECK_EQ(gap2_offset_, kNoOffset);
        gap2_offset_ = gap_start;
        gap_start += 2u;
        if (kGapsToCheck <= 3u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 4u) != 0u) {
      // Whatever remains must be exactly one 4-byte gap.
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 4u);
      DCHECK_ALIGNED(gap_end, 8u);
      DCHECK_EQ(gap_start + 4u, gap_end);
      DCHECK_EQ(gap4_offset_, kNoOffset);
      gap4_offset_ = gap_start;
      return;
    }

    // Unreachable if the mask matched the actual range; debug-only diagnostic.
    DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
                  << " after checking " << kGapsToCheck;
  }

  // Sentinel meaning "no gap of this size recorded".
  static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);

  uint32_t gap4_offset_ = kNoOffset;
  uint32_t gap2_offset_ = kNoOffset;
  uint32_t gap1_offset_ = kNoOffset;
};
8729
8730template <size_t kSize>
Vladimir Marko42bee502021-01-28 14:58:35 +00008731ALWAYS_INLINE
8732MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
8733 MemberOffset field_offset) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008734 DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
8735 DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
8736 field->SetOffset(field_offset);
8737 return MemberOffset(field_offset.Uint32Value() + kSize);
8738}
8739
Vladimir Marko42bee502021-01-28 14:58:35 +00008740bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
8741 Thread* self,
8742 Handle<mirror::Class> klass,
8743 bool is_static,
8744 size_t* class_size) {
Ian Rogers7b078e82014-09-10 14:44:24 -07008745 self->AllowThreadSuspension();
Mathieu Chartierc7853442015-03-27 14:35:38 -07008746 const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008747 LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
8748 klass->GetIFieldsPtr();
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008749
Mingyao Yang98d1cc82014-05-15 17:02:16 -07008750 // Initialize field_offset
Brian Carlstrom693267a2011-09-06 09:25:34 -07008751 MemberOffset field_offset(0);
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008752 if (is_static) {
Vladimir Marko42bee502021-01-28 14:58:35 +00008753 field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
8754 class_linker->GetImagePointerSize());
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008755 } else {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008756 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07008757 if (super_class != nullptr) {
Brian Carlstromf3632832014-05-20 15:36:53 -07008758 CHECK(super_class->IsResolved())
David Sehr709b0702016-10-13 09:12:37 -07008759 << klass->PrettyClass() << " " << super_class->PrettyClass();
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008760 field_offset = MemberOffset(super_class->GetObjectSize());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008761 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008762 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008763
David Sehr709b0702016-10-13 09:12:37 -07008764 CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008765
Brian Carlstromdbc05252011-09-09 01:59:59 -07008766 // we want a relatively stable order so that adding new fields
Elliott Hughesadb460d2011-10-05 17:02:34 -07008767 // minimizes disruption of C++ version such as Class and Method.
Alex Lighte64300b2015-12-15 15:02:47 -08008768 //
8769 // The overall sort order order is:
8770 // 1) All object reference fields, sorted alphabetically.
8771 // 2) All java long (64-bit) integer fields, sorted alphabetically.
8772 // 3) All java double (64-bit) floating point fields, sorted alphabetically.
8773 // 4) All java int (32-bit) integer fields, sorted alphabetically.
8774 // 5) All java float (32-bit) floating point fields, sorted alphabetically.
8775 // 6) All java char (16-bit) integer fields, sorted alphabetically.
8776 // 7) All java short (16-bit) integer fields, sorted alphabetically.
8777 // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
8778 // 9) All java byte (8-bit) integer fields, sorted alphabetically.
8779 //
Vladimir Marko42bee502021-01-28 14:58:35 +00008780 // (References are first to increase the chance of reference visiting
8781 // being able to take a fast path using a bitmap of references at the
8782 // start of the object, see `Class::reference_instance_offsets_`.)
8783 //
Vladimir Markoc7993d52021-01-27 15:20:56 +00008784 // Once the fields are sorted in this order we will attempt to fill any gaps
8785 // that might be present in the memory layout of the structure.
8786 // Note that we shall not fill gaps between the superclass fields.
8787
8788 // Collect fields and their "type order index" (see numbered points above).
Mathieu Chartier2d5f39e2014-09-19 17:52:37 -07008789 const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
Vladimir Markoc7993d52021-01-27 15:20:56 +00008790 "Using plain ArtField references");
Vladimir Marko42bee502021-01-28 14:58:35 +00008791 constexpr size_t kStackBufferEntries = 64; // Avoid allocations for small number of fields.
8792 FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
8793 std::vector<FieldTypeOrderAndIndex> heap_buffer;
8794 ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
Vladimir Markoc7993d52021-01-27 15:20:56 +00008795 if (num_fields <= kStackBufferEntries) {
Vladimir Marko42bee502021-01-28 14:58:35 +00008796 sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008797 } else {
8798 heap_buffer.resize(num_fields);
Vladimir Marko42bee502021-01-28 14:58:35 +00008799 sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
Brian Carlstromdbc05252011-09-09 01:59:59 -07008800 }
Brian Carlstromdbc05252011-09-09 01:59:59 -07008801 size_t num_reference_fields = 0;
Vladimir Markoc7993d52021-01-27 15:20:56 +00008802 size_t primitive_fields_start = num_fields;
Vladimir Marko42bee502021-01-28 14:58:35 +00008803 DCHECK_LE(num_fields, 1u << 16);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008804 for (size_t i = 0; i != num_fields; ++i) {
8805 ArtField* field = &fields->At(i);
8806 const char* descriptor = field->GetTypeDescriptor();
Vladimir Marko42bee502021-01-28 14:58:35 +00008807 FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
8808 uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008809 // Insert references to the start, other fields to the end.
8810 DCHECK_LT(num_reference_fields, primitive_fields_start);
Vladimir Marko42bee502021-01-28 14:58:35 +00008811 if (field_type_order == FieldTypeOrder::kReference) {
8812 sorted_fields[num_reference_fields] = { field_type_order, field_index };
Vladimir Markoc7993d52021-01-27 15:20:56 +00008813 ++num_reference_fields;
8814 } else {
8815 --primitive_fields_start;
Vladimir Marko42bee502021-01-28 14:58:35 +00008816 sorted_fields[primitive_fields_start] = { field_type_order, field_index };
Vladimir Markoc7993d52021-01-27 15:20:56 +00008817 }
8818 }
8819 DCHECK_EQ(num_reference_fields, primitive_fields_start);
Fred Shih381e4ca2014-08-25 17:24:27 -07008820
Vladimir Marko42bee502021-01-28 14:58:35 +00008821 // Reference fields are already sorted by field index (and dex field index).
Vladimir Markoc7993d52021-01-27 15:20:56 +00008822 DCHECK(std::is_sorted(
8823 sorted_fields.begin(),
8824 sorted_fields.begin() + num_reference_fields,
Vladimir Marko42bee502021-01-28 14:58:35 +00008825 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8826 ArtField* lhs_field = &fields->At(lhs.field_index);
8827 ArtField* rhs_field = &fields->At(rhs.field_index);
8828 CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8829 CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8830 CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
8831 lhs.field_index < rhs.field_index);
8832 return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
Vladimir Markoc7993d52021-01-27 15:20:56 +00008833 }));
Vladimir Marko42bee502021-01-28 14:58:35 +00008834 // Primitive fields were stored in reverse order of their field index (and dex field index).
Vladimir Markoc7993d52021-01-27 15:20:56 +00008835 DCHECK(std::is_sorted(
8836 sorted_fields.begin() + primitive_fields_start,
8837 sorted_fields.end(),
Vladimir Marko42bee502021-01-28 14:58:35 +00008838 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8839 ArtField* lhs_field = &fields->At(lhs.field_index);
8840 ArtField* rhs_field = &fields->At(rhs.field_index);
8841 CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8842 CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8843 CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
8844 lhs.field_index > rhs.field_index);
8845 return lhs.field_index > rhs.field_index;
Vladimir Markoc7993d52021-01-27 15:20:56 +00008846 }));
8847 // Sort the primitive fields by the field type order, then field index.
8848 std::sort(sorted_fields.begin() + primitive_fields_start,
8849 sorted_fields.end(),
Vladimir Marko42bee502021-01-28 14:58:35 +00008850 [](const auto& lhs, const auto& rhs) {
8851 if (lhs.field_type_order != rhs.field_type_order) {
8852 return lhs.field_type_order < rhs.field_type_order;
Vladimir Markoc7993d52021-01-27 15:20:56 +00008853 } else {
Vladimir Marko42bee502021-01-28 14:58:35 +00008854 return lhs.field_index < rhs.field_index;
Vladimir Markoc7993d52021-01-27 15:20:56 +00008855 }
8856 });
8857 // Primitive fields are now sorted by field size (descending), then type, then field index.
8858 DCHECK(std::is_sorted(
8859 sorted_fields.begin() + primitive_fields_start,
8860 sorted_fields.end(),
Vladimir Marko42bee502021-01-28 14:58:35 +00008861 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8862 ArtField* lhs_field = &fields->At(lhs.field_index);
8863 ArtField* rhs_field = &fields->At(rhs.field_index);
8864 Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
Vladimir Markoc7993d52021-01-27 15:20:56 +00008865 CHECK_NE(lhs_type, Primitive::kPrimNot);
Vladimir Marko42bee502021-01-28 14:58:35 +00008866 Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
Vladimir Markoc7993d52021-01-27 15:20:56 +00008867 CHECK_NE(rhs_type, Primitive::kPrimNot);
8868 if (lhs_type != rhs_type) {
8869 size_t lhs_size = Primitive::ComponentSize(lhs_type);
8870 size_t rhs_size = Primitive::ComponentSize(rhs_type);
8871 return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
8872 } else {
Vladimir Marko42bee502021-01-28 14:58:35 +00008873 return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
Vladimir Markoc7993d52021-01-27 15:20:56 +00008874 }
8875 }));
8876
8877 // Process reference fields.
8878 FieldGaps field_gaps;
8879 size_t index = 0u;
8880 if (num_reference_fields != 0u) {
8881 constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
8882 field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
8883 for (; index != num_reference_fields; ++index) {
Vladimir Marko42bee502021-01-28 14:58:35 +00008884 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008885 field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008886 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008887 }
Vladimir Markoc7993d52021-01-27 15:20:56 +00008888 // Process 64-bit fields.
Vladimir Marko42bee502021-01-28 14:58:35 +00008889 if (index != num_fields &&
8890 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008891 field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
Vladimir Marko42bee502021-01-28 14:58:35 +00008892 while (index != num_fields &&
8893 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
8894 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008895 field_offset = AssignFieldOffset<8u>(field, field_offset);
8896 ++index;
Mathieu Chartier55650292020-09-14 12:21:04 -07008897 }
Mathieu Chartier55650292020-09-14 12:21:04 -07008898 }
Vladimir Markoc7993d52021-01-27 15:20:56 +00008899 // Process 32-bit fields.
Vladimir Marko42bee502021-01-28 14:58:35 +00008900 if (index != num_fields &&
8901 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008902 field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
8903 if (field_gaps.HasGap<4u>()) {
Vladimir Marko42bee502021-01-28 14:58:35 +00008904 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008905 AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>()); // Ignore return value.
8906 ++index;
8907 DCHECK(!field_gaps.HasGap<4u>()); // There can be only one gap for a 32-bit field.
8908 }
Vladimir Marko42bee502021-01-28 14:58:35 +00008909 while (index != num_fields &&
8910 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
8911 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008912 field_offset = AssignFieldOffset<4u>(field, field_offset);
8913 ++index;
8914 }
8915 }
8916 // Process 16-bit fields.
Vladimir Marko42bee502021-01-28 14:58:35 +00008917 if (index != num_fields &&
8918 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008919 field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
8920 while (index != num_fields &&
Vladimir Marko42bee502021-01-28 14:58:35 +00008921 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
Vladimir Markoc7993d52021-01-27 15:20:56 +00008922 field_gaps.HasGap<2u>()) {
Vladimir Marko42bee502021-01-28 14:58:35 +00008923 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008924 AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>()); // Ignore return value.
8925 ++index;
8926 }
Vladimir Marko42bee502021-01-28 14:58:35 +00008927 while (index != num_fields &&
8928 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
8929 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008930 field_offset = AssignFieldOffset<2u>(field, field_offset);
8931 ++index;
8932 }
8933 }
8934 // Process 8-bit fields.
8935 for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
Vladimir Marko42bee502021-01-28 14:58:35 +00008936 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008937 AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>()); // Ignore return value.
8938 }
8939 for (; index != num_fields; ++index) {
Vladimir Marko42bee502021-01-28 14:58:35 +00008940 ArtField* field = &fields->At(sorted_fields[index].field_index);
Vladimir Markoc7993d52021-01-27 15:20:56 +00008941 field_offset = AssignFieldOffset<1u>(field, field_offset);
8942 }
8943
Ian Rogers7b078e82014-09-10 14:44:24 -07008944 self->EndAssertNoThreadSuspension(old_no_suspend_cause);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07008945
Elliott Hughesadb460d2011-10-05 17:02:34 -07008946 // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
Santiago Aboy Solanes6cdabe12022-02-18 15:27:43 +00008947 DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
Vladimir Marko42bee502021-01-28 14:58:35 +00008948 if (!is_static &&
8949 UNLIKELY(!class_linker->init_done_) &&
8950 klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
Elliott Hughesadb460d2011-10-05 17:02:34 -07008951 // We know there are no non-reference fields in the Reference classes, and we know
8952 // that 'referent' is alphabetically last, so this is easy...
David Sehr709b0702016-10-13 09:12:37 -07008953 CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
Mathieu Chartier54d220e2015-07-30 16:20:06 -07008954 CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
David Sehr709b0702016-10-13 09:12:37 -07008955 << klass->PrettyClass();
Elliott Hughesadb460d2011-10-05 17:02:34 -07008956 --num_reference_fields;
8957 }
8958
Mingyao Yang98d1cc82014-05-15 17:02:16 -07008959 size_t size = field_offset.Uint32Value();
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008960 // Update klass
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008961 if (is_static) {
8962 klass->SetNumReferenceStaticFields(num_reference_fields);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07008963 *class_size = size;
Brian Carlstrom3320cf42011-10-04 14:58:28 -07008964 } else {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07008965 klass->SetNumReferenceInstanceFields(num_reference_fields);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008966 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07008967 if (num_reference_fields == 0 || super_class == nullptr) {
8968 // object has one reference field, klass, but we ignore it since we always visit the class.
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008969 // super_class is null iff the class is java.lang.Object.
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07008970 if (super_class == nullptr ||
8971 (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
8972 klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008973 }
8974 }
8975 if (kIsDebugBuild) {
8976 DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
8977 size_t total_reference_instance_fields = 0;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07008978 ObjPtr<mirror::Class> cur_super = klass.Get();
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008979 while (cur_super != nullptr) {
8980 total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
8981 cur_super = cur_super->GetSuperClass();
8982 }
8983 if (super_class == nullptr) {
David Sehr709b0702016-10-13 09:12:37 -07008984 CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07008985 } else {
8986 // Check that there is at least num_reference_fields other than Object.class.
8987 CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
David Sehr709b0702016-10-13 09:12:37 -07008988 << klass->PrettyClass();
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07008989 }
8990 }
Brian Carlstromdbc05252011-09-09 01:59:59 -07008991 if (!klass->IsVariableSize()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07008992 std::string temp;
8993 DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
8994 size_t previous_size = klass->GetObjectSize();
8995 if (previous_size != 0) {
8996 // Make sure that we didn't originally have an incorrect size.
8997 CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
Mathieu Chartier79b4f382013-10-23 15:21:37 -07008998 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07008999 klass->SetObjectSize(size);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07009000 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07009001 }
Vladimir Marko76649e82014-11-10 18:32:59 +00009002
9003 if (kIsDebugBuild) {
9004 // Make sure that the fields array is ordered by name but all reference
9005 // offsets are at the beginning as far as alignment allows.
9006 MemberOffset start_ref_offset = is_static
Vladimir Marko42bee502021-01-28 14:58:35 +00009007 ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
Vladimir Marko76649e82014-11-10 18:32:59 +00009008 : klass->GetFirstReferenceInstanceFieldOffset();
9009 MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
9010 num_reference_fields *
9011 sizeof(mirror::HeapReference<mirror::Object>));
9012 MemberOffset current_ref_offset = start_ref_offset;
9013 for (size_t i = 0; i < num_fields; i++) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07009014 ArtField* field = &fields->At(i);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009015 VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
David Sehr709b0702016-10-13 09:12:37 -07009016 << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
9017 << " offset=" << field->GetOffsetDuringLinking();
Vladimir Marko76649e82014-11-10 18:32:59 +00009018 if (i != 0) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07009019 ArtField* const prev_field = &fields->At(i - 1);
Vladimir Marko7a7c1db2014-11-17 15:13:34 +00009020 // NOTE: The field names can be the same. This is not possible in the Java language
9021 // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07009022 DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
Vladimir Marko76649e82014-11-10 18:32:59 +00009023 }
9024 Primitive::Type type = field->GetTypeAsPrimitiveType();
9025 bool is_primitive = type != Primitive::kPrimNot;
9026 if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
9027 strcmp("referent", field->GetName()) == 0) {
9028 is_primitive = true; // We lied above, so we have to expect a lie here.
9029 }
9030 MemberOffset offset = field->GetOffsetDuringLinking();
9031 if (is_primitive) {
9032 if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
9033 // Shuffled before references.
9034 size_t type_size = Primitive::ComponentSize(type);
9035 CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
9036 CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
9037 CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
9038 CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
9039 }
9040 } else {
9041 CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
9042 current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
9043 sizeof(mirror::HeapReference<mirror::Object>));
9044 }
9045 }
9046 CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
9047 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07009048 return true;
9049}
9050
Vladimir Marko42bee502021-01-28 14:58:35 +00009051bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
9052 CHECK(klass != nullptr);
9053 return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
9054}
9055
9056bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
9057 CHECK(klass != nullptr);
9058 return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
9059}
9060
Vladimir Marko76649e82014-11-10 18:32:59 +00009061// Set the bitmap of reference instance field offsets.
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07009062void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07009063 uint32_t reference_offsets = 0;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009064 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009065 // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009066 if (super_class != nullptr) {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07009067 reference_offsets = super_class->GetReferenceInstanceOffsets();
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009068 // Compute reference offsets unless our superclass overflowed.
9069 if (reference_offsets != mirror::Class::kClassWalkSuper) {
9070 size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
Vladimir Marko76649e82014-11-10 18:32:59 +00009071 if (num_reference_fields != 0u) {
9072 // All of the fields that contain object references are guaranteed be grouped in memory
9073 // starting at an appropriately aligned address after super class object data.
9074 uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
9075 sizeof(mirror::HeapReference<mirror::Object>));
9076 uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009077 sizeof(mirror::HeapReference<mirror::Object>);
Vladimir Marko76649e82014-11-10 18:32:59 +00009078 if (start_bit + num_reference_fields > 32) {
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009079 reference_offsets = mirror::Class::kClassWalkSuper;
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009080 } else {
Vladimir Marko76649e82014-11-10 18:32:59 +00009081 reference_offsets |= (0xffffffffu << start_bit) &
9082 (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009083 }
9084 }
Brian Carlstrom4873d462011-08-21 15:23:39 -07009085 }
9086 }
Mingyao Yangfaff0f02014-09-10 12:03:22 -07009087 klass->SetReferenceInstanceOffsets(reference_offsets);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07009088}
9089
Vladimir Marko18090d12018-06-01 16:53:12 +01009090ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9091 ObjPtr<mirror::DexCache> dex_cache) {
9092 StackHandleScope<1> hs(Thread::Current());
9093 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
9094 return DoResolveString(string_idx, h_dex_cache);
9095}
9096
9097ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9098 Handle<mirror::DexCache> dex_cache) {
Vladimir Markoa64b52d2017-12-08 16:27:49 +00009099 const DexFile& dex_file = *dex_cache->GetDexFile();
Ian Rogersdfb325e2013-10-30 01:00:44 -07009100 uint32_t utf16_length;
9101 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009102 ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00009103 if (string != nullptr) {
9104 dex_cache->SetResolvedString(string_idx, string);
9105 }
Vladimir Marko28e012a2017-12-07 11:22:59 +00009106 return string;
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009107}
9108
Vladimir Marko18090d12018-06-01 16:53:12 +01009109ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
9110 ObjPtr<mirror::DexCache> dex_cache) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009111 DCHECK(dex_cache != nullptr);
Vladimir Markoa64b52d2017-12-08 16:27:49 +00009112 const DexFile& dex_file = *dex_cache->GetDexFile();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009113 uint32_t utf16_length;
9114 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08009115 ObjPtr<mirror::String> string =
9116 intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009117 if (string != nullptr) {
9118 dex_cache->SetResolvedString(string_idx, string);
9119 }
Vladimir Marko28e012a2017-12-07 11:22:59 +00009120 return string;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009121}
9122
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009123ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
Vladimir Marko09c5ca42018-05-31 15:15:31 +01009124 ObjPtr<mirror::Class> referrer) {
9125 return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
9126}
9127
9128ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009129 ObjPtr<mirror::DexCache> dex_cache,
9130 ObjPtr<mirror::ClassLoader> class_loader) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009131 DCHECK(dex_cache->GetClassLoader() == class_loader);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009132 const DexFile& dex_file = *dex_cache->GetDexFile();
9133 const char* descriptor = dex_file.StringByTypeIdx(type_idx);
Vladimir Markoc63d9672021-03-31 15:50:39 +01009134 ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
9135 if (type != nullptr) {
9136 DCHECK(type->IsResolved());
9137 dex_cache->SetResolvedType(type_idx, type);
9138 }
9139 return type;
9140}
9141
9142ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(const char* descriptor,
9143 ObjPtr<mirror::ClassLoader> class_loader) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009144 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
9145 ObjPtr<mirror::Class> type = nullptr;
9146 if (descriptor[1] == '\0') {
9147 // only the descriptors of primitive types should be 1 character long, also avoid class lookup
9148 // for primitive classes that aren't backed by dex files.
Vladimir Marko9186b182018-11-06 14:55:54 +00009149 type = LookupPrimitiveClass(descriptor[0]);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009150 } else {
9151 Thread* const self = Thread::Current();
9152 DCHECK(self != nullptr);
9153 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
9154 // Find the class in the loaded classes table.
Vladimir Markobcf17522018-06-01 13:14:32 +01009155 type = LookupClass(self, descriptor, hash, class_loader);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009156 }
Vladimir Markoc63d9672021-03-31 15:50:39 +01009157 return (type != nullptr && type->IsResolved()) ? type : nullptr;
Mathieu Chartierb8901302016-09-30 10:27:43 -07009158}
9159
Andreas Gampeb0625e02019-05-01 12:43:31 -07009160template <typename RefType>
9161ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
Vladimir Marko09c5ca42018-05-31 15:15:31 +01009162 StackHandleScope<2> hs(Thread::Current());
9163 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9164 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
9165 return DoResolveType(type_idx, dex_cache, class_loader);
9166}
9167
Andreas Gampe4835d212018-11-21 14:55:10 -08009168// Instantiate the above.
9169template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9170 ArtField* referrer);
9171template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9172 ArtMethod* referrer);
9173template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
9174 ObjPtr<mirror::Class> referrer);
9175
Vladimir Marko09c5ca42018-05-31 15:15:31 +01009176ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009177 Handle<mirror::DexCache> dex_cache,
9178 Handle<mirror::ClassLoader> class_loader) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009179 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009180 Thread* self = Thread::Current();
9181 const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
9182 ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
9183 if (resolved != nullptr) {
9184 // TODO: we used to throw here if resolved's class loader was not the
9185 // boot class loader. This was to permit different classes with the
9186 // same name to be loaded simultaneously by different loaders
9187 dex_cache->SetResolvedType(type_idx, resolved);
9188 } else {
9189 CHECK(self->IsExceptionPending())
9190 << "Expected pending exception for failed resolution of: " << descriptor;
9191 // Convert a ClassNotFoundException to a NoClassDefFoundError.
9192 StackHandleScope<1> hs(self);
9193 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
Vladimir Markob4eb1b12018-05-24 11:09:38 +01009194 if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009195 DCHECK(resolved == nullptr); // No Handle needed to preserve resolved.
9196 self->ClearException();
9197 ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
9198 self->GetException()->SetCause(cause.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07009199 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07009200 }
Vladimir Marko72ab6842017-01-20 19:32:50 +00009201 DCHECK((resolved == nullptr) || resolved->IsResolved())
David Sehr709b0702016-10-13 09:12:37 -07009202 << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
Vladimir Marko28e012a2017-12-07 11:22:59 +00009203 return resolved;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07009204}
9205
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00009206ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
9207 ObjPtr<mirror::DexCache> dex_cache,
9208 ObjPtr<mirror::ClassLoader> class_loader,
9209 uint32_t method_idx) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009210 DCHECK(dex_cache->GetClassLoader() == class_loader);
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00009211 // Search for the method using dex_cache and method_idx. The Class::Find*Method()
9212 // functions can optimize the search if the dex_cache is the same as the DexCache
9213 // of the class, with fall-back to name and signature search otherwise.
9214 ArtMethod* resolved = nullptr;
9215 if (klass->IsInterface()) {
9216 resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
9217 } else {
9218 resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9219 }
9220 DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009221 if (resolved != nullptr &&
Nicolas Geoffrayfedff512021-02-07 21:36:33 +00009222 // We pass AccessMethod::kNone instead of kLinking to not warn yet on the
9223 // access, as we'll be looking if the method can be accessed through an
9224 // interface.
David Brazdilf50ac102018-10-17 18:00:06 +01009225 hiddenapi::ShouldDenyAccessToMember(resolved,
9226 hiddenapi::AccessContext(class_loader, dex_cache),
Nicolas Geoffrayfedff512021-02-07 21:36:33 +00009227 hiddenapi::AccessMethod::kNone)) {
9228 // The resolved method that we have found cannot be accessed due to
9229 // hiddenapi (typically it is declared up the hierarchy and is not an SDK
9230 // method). Try to find an interface method from the implemented interfaces which is
Nicolas Geoffrayaf61f502021-03-31 16:03:50 +00009231 // part of the SDK.
9232 ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
Nicolas Geoffrayfedff512021-02-07 21:36:33 +00009233 if (itf_method == nullptr) {
9234 // No interface method. Call ShouldDenyAccessToMember again but this time
9235 // with AccessMethod::kLinking to ensure that an appropriate warning is
9236 // logged.
9237 hiddenapi::ShouldDenyAccessToMember(resolved,
9238 hiddenapi::AccessContext(class_loader, dex_cache),
9239 hiddenapi::AccessMethod::kLinking);
9240 resolved = nullptr;
9241 } else {
9242 // We found an interface method that is accessible, continue with the resolved method.
9243 }
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009244 }
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00009245 if (resolved != nullptr) {
9246 // In case of jmvti, the dex file gets verified before being registered, so first
9247 // check if it's registered before checking class tables.
9248 const DexFile& dex_file = *dex_cache->GetDexFile();
Santiago Aboy Solanes6cdabe12022-02-18 15:27:43 +00009249 DCHECK_IMPLIES(
9250 IsDexFileRegistered(Thread::Current(), dex_file),
9251 FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00009252 << "DexFile referrer: " << dex_file.GetLocation()
9253 << " ClassLoader: " << DescribeLoaders(class_loader, "");
9254 // Be a good citizen and update the dex cache to speed subsequent calls.
David Srbecky5de5efe2021-02-15 21:23:00 +00009255 dex_cache->SetResolvedMethod(method_idx, resolved);
Nicolas Geoffraybefa3092018-02-22 14:50:01 +00009256 // Disable the following invariant check as the verifier breaks it. b/73760543
9257 // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
9258 // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
9259 // << "Method: " << resolved->PrettyMethod() << ", "
9260 // << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
9261 // << "DexFile referrer: " << dex_file.GetLocation();
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00009262 }
9263 return resolved;
9264}
9265
David Brazdil4525e0b2018-04-05 16:57:32 +01009266// Returns true if `method` is either null or hidden.
9267// Does not print any warnings if it is hidden.
9268static bool CheckNoSuchMethod(ArtMethod* method,
9269 ObjPtr<mirror::DexCache> dex_cache,
9270 ObjPtr<mirror::ClassLoader> class_loader)
9271 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffrayc07f4882021-09-13 09:20:33 +01009272 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
David Brazdil4525e0b2018-04-05 16:57:32 +01009273 return method == nullptr ||
David Brazdilf50ac102018-10-17 18:00:06 +01009274 hiddenapi::ShouldDenyAccessToMember(method,
9275 hiddenapi::AccessContext(class_loader, dex_cache),
9276 hiddenapi::AccessMethod::kNone); // no warnings
David Brazdil4525e0b2018-04-05 16:57:32 +01009277}
9278
9279ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
9280 ObjPtr<mirror::DexCache> dex_cache,
9281 ObjPtr<mirror::ClassLoader> class_loader,
9282 uint32_t method_idx) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009283 DCHECK(dex_cache->GetClassLoader() == class_loader);
David Brazdil4525e0b2018-04-05 16:57:32 +01009284 if (klass->IsInterface()) {
9285 ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9286 return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
9287 } else {
9288 // If there was an interface method with the same signature, we would have
9289 // found it in the "copied" methods. Only DCHECK that the interface method
9290 // really does not exist.
9291 if (kIsDebugBuild) {
9292 ArtMethod* method =
9293 klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
Nicolas Geoffray24642182022-02-14 14:40:51 +00009294 CHECK(CheckNoSuchMethod(method, dex_cache, class_loader) ||
9295 (klass->FindAccessibleInterfaceMethod(method, image_pointer_size_) == nullptr));
David Brazdil4525e0b2018-04-05 16:57:32 +01009296 }
9297 return nullptr;
9298 }
9299}
9300
Vladimir Marko89011192017-12-11 13:45:05 +00009301ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
Jeff Hao13e748b2015-08-25 20:44:19 +00009302 Handle<mirror::DexCache> dex_cache,
9303 Handle<mirror::ClassLoader> class_loader) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009304 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
David Srbecky5de5efe2021-02-15 21:23:00 +00009305 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009306 Thread::PoisonObjectPointersIfDebug();
Vladimir Marko07bfbac2017-07-06 14:55:02 +01009307 if (resolved != nullptr) {
9308 DCHECK(!resolved->IsRuntimeMethod());
Jeff Hao13e748b2015-08-25 20:44:19 +00009309 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
9310 return resolved;
9311 }
9312 // Fail, get the declaring class.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009313 const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009314 ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Jeff Hao13e748b2015-08-25 20:44:19 +00009315 if (klass == nullptr) {
9316 Thread::Current()->AssertPendingException();
9317 return nullptr;
9318 }
9319 if (klass->IsInterface()) {
Vladimir Markoba118822017-06-12 15:41:56 +01009320 resolved = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, image_pointer_size_);
9321 } else {
9322 resolved = klass->FindClassMethod(dex_cache.Get(), method_idx, image_pointer_size_);
Jeff Hao13e748b2015-08-25 20:44:19 +00009323 }
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009324 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009325 hiddenapi::ShouldDenyAccessToMember(
9326 resolved,
9327 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
9328 hiddenapi::AccessMethod::kLinking)) {
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009329 resolved = nullptr;
9330 }
Jeff Hao13e748b2015-08-25 20:44:19 +00009331 return resolved;
9332}
9333
Vladimir Markof44d36c2017-03-14 14:18:46 +00009334ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
9335 ObjPtr<mirror::DexCache> dex_cache,
9336 ObjPtr<mirror::ClassLoader> class_loader,
9337 bool is_static) {
Nicolas Geoffrayc07f4882021-09-13 09:20:33 +01009338 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
Vladimir Markof44d36c2017-03-14 14:18:46 +00009339 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009340 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009341 ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
9342 if (klass == nullptr) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009343 klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009344 }
9345 if (klass == nullptr) {
9346 // The class has not been resolved yet, so the field is also unresolved.
9347 return nullptr;
9348 }
9349 DCHECK(klass->IsResolved());
Vladimir Markof44d36c2017-03-14 14:18:46 +00009350
David Brazdil1ab0fa82018-05-04 11:28:03 +01009351 return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009352}
9353
Vladimir Markoe11dd502017-12-08 14:09:45 +00009354ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
Mathieu Chartierc7853442015-03-27 14:35:38 -07009355 Handle<mirror::DexCache> dex_cache,
9356 Handle<mirror::ClassLoader> class_loader) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009357 DCHECK(dex_cache != nullptr);
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009358 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
David Srbecky5de5efe2021-02-15 21:23:00 +00009359 ArtField* resolved = dex_cache->GetResolvedField(field_idx);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009360 Thread::PoisonObjectPointersIfDebug();
Andreas Gampe58a5af82014-07-31 16:23:49 -07009361 if (resolved != nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009362 return resolved;
9363 }
Vladimir Markoe11dd502017-12-08 14:09:45 +00009364 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009365 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009366 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009367 if (klass == nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009368 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009369 return nullptr;
Ian Rogersb067ac22011-12-13 18:05:09 -08009370 }
9371
David Brazdil1ab0fa82018-05-04 11:28:03 +01009372 resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
9373 if (resolved == nullptr) {
9374 const char* name = dex_file.GetFieldName(field_id);
9375 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009376 ThrowNoSuchFieldError("", klass, type, name);
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009377 }
9378 return resolved;
Carl Shapiro5fafe2b2011-07-09 15:34:41 -07009379}
9380
David Brazdil1ab0fa82018-05-04 11:28:03 +01009381ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
9382 ObjPtr<mirror::DexCache> dex_cache,
9383 ObjPtr<mirror::ClassLoader> class_loader,
9384 uint32_t field_idx,
9385 bool is_static) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009386 DCHECK(dex_cache->GetClassLoader() == class_loader);
Vladimir Markob10668c2021-06-10 09:52:53 +01009387 ArtField* resolved = is_static ? klass->FindStaticField(dex_cache, field_idx)
9388 : klass->FindInstanceField(dex_cache, field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009389 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009390 hiddenapi::ShouldDenyAccessToMember(resolved,
9391 hiddenapi::AccessContext(class_loader, dex_cache),
9392 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009393 resolved = nullptr;
9394 }
9395
9396 if (resolved != nullptr) {
David Srbecky5de5efe2021-02-15 21:23:00 +00009397 dex_cache->SetResolvedField(field_idx, resolved);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009398 }
9399
9400 return resolved;
9401}
9402
9403ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
9404 ObjPtr<mirror::DexCache> dex_cache,
9405 ObjPtr<mirror::ClassLoader> class_loader,
9406 uint32_t field_idx) {
Nicolas Geoffrayc07f4882021-09-13 09:20:33 +01009407 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
Vladimir Markob10668c2021-06-10 09:52:53 +01009408 ArtField* resolved = klass->FindField(dex_cache, field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009409
9410 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009411 hiddenapi::ShouldDenyAccessToMember(resolved,
9412 hiddenapi::AccessContext(class_loader, dex_cache),
9413 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009414 resolved = nullptr;
9415 }
9416
9417 if (resolved != nullptr) {
David Srbecky5de5efe2021-02-15 21:23:00 +00009418 dex_cache->SetResolvedField(field_idx, resolved);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009419 }
9420
9421 return resolved;
9422}
9423
Vladimir Markoaf940202017-12-08 15:01:18 +00009424ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
9425 Thread* self,
Orion Hodson06d10a72018-05-14 08:53:38 +01009426 dex::ProtoIndex proto_idx,
Vladimir Markoaf940202017-12-08 15:01:18 +00009427 Handle<mirror::DexCache> dex_cache,
9428 Handle<mirror::ClassLoader> class_loader) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009429 DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
Andreas Gampefa4333d2017-02-14 11:10:34 -08009430 DCHECK(dex_cache != nullptr);
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009431 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
Narayan Kamath25352fc2016-08-03 12:46:58 +01009432
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009433 ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009434 if (resolved != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01009435 return resolved;
Narayan Kamath25352fc2016-08-03 12:46:58 +01009436 }
9437
Narayan Kamath25352fc2016-08-03 12:46:58 +01009438 StackHandleScope<4> hs(self);
9439
9440 // First resolve the return type.
Vladimir Markoaf940202017-12-08 15:01:18 +00009441 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009442 const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009443 Handle<mirror::Class> return_type(hs.NewHandle(
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009444 ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009445 if (return_type == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009446 DCHECK(self->IsExceptionPending());
9447 return nullptr;
9448 }
9449
9450 // Then resolve the argument types.
9451 //
9452 // TODO: Is there a better way to figure out the number of method arguments
9453 // other than by looking at the shorty ?
9454 const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;
9455
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009456 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009457 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9458 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009459 if (method_params == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009460 DCHECK(self->IsExceptionPending());
9461 return nullptr;
9462 }
9463
9464 DexFileParameterIterator it(dex_file, proto_id);
9465 int32_t i = 0;
9466 MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
9467 for (; it.HasNext(); it.Next()) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08009468 const dex::TypeIndex type_idx = it.GetTypeIdx();
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009469 param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009470 if (param_class == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009471 DCHECK(self->IsExceptionPending());
9472 return nullptr;
9473 }
9474
9475 method_params->Set(i++, param_class.Get());
9476 }
9477
9478 DCHECK(!it.HasNext());
9479
9480 Handle<mirror::MethodType> type = hs.NewHandle(
9481 mirror::MethodType::Create(self, return_type, method_params));
Nicolas Geoffray4aebd582021-07-23 15:27:31 +01009482 if (type != nullptr) {
9483 dex_cache->SetResolvedMethodType(proto_idx, type.Get());
9484 }
Narayan Kamath25352fc2016-08-03 12:46:58 +01009485
9486 return type.Get();
9487}
9488
Vladimir Markoaf940202017-12-08 15:01:18 +00009489ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
Orion Hodson06d10a72018-05-14 08:53:38 +01009490 dex::ProtoIndex proto_idx,
Vladimir Markoaf940202017-12-08 15:01:18 +00009491 ArtMethod* referrer) {
Orion Hodson2e599942017-09-22 16:17:41 +01009492 StackHandleScope<2> hs(self);
Orion Hodson2e599942017-09-22 16:17:41 +01009493 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9494 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
Vladimir Markoaf940202017-12-08 15:01:18 +00009495 return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
Orion Hodson2e599942017-09-22 16:17:41 +01009496}
9497
Vladimir Marko5aead702019-03-27 11:00:36 +00009498ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009499 Thread* self,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009500 const dex::MethodHandleItem& method_handle,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009501 ArtMethod* referrer) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009502 DexFile::MethodHandleType handle_type =
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009503 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9504 mirror::MethodHandle::Kind kind;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009505 bool is_put;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009506 bool is_static;
9507 int32_t num_params;
Orion Hodsonc069a302017-01-18 09:23:12 +00009508 switch (handle_type) {
9509 case DexFile::MethodHandleType::kStaticPut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009510 kind = mirror::MethodHandle::Kind::kStaticPut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009511 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009512 is_static = true;
9513 num_params = 1;
Orion Hodson631827d2017-04-10 14:53:47 +01009514 break;
9515 }
9516 case DexFile::MethodHandleType::kStaticGet: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009517 kind = mirror::MethodHandle::Kind::kStaticGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009518 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009519 is_static = true;
9520 num_params = 0;
Orion Hodson631827d2017-04-10 14:53:47 +01009521 break;
9522 }
9523 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009524 kind = mirror::MethodHandle::Kind::kInstancePut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009525 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009526 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009527 num_params = 2;
9528 break;
9529 }
9530 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009531 kind = mirror::MethodHandle::Kind::kInstanceGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009532 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009533 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009534 num_params = 1;
9535 break;
9536 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009537 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson82b351f2017-07-05 14:34:25 +01009538 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009539 case DexFile::MethodHandleType::kInvokeConstructor:
Orion Hodson82b351f2017-07-05 14:34:25 +01009540 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009541 case DexFile::MethodHandleType::kInvokeInterface:
9542 UNREACHABLE();
Orion Hodsonc069a302017-01-18 09:23:12 +00009543 }
9544
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009545 ArtField* target_field =
9546 ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
9547 if (LIKELY(target_field != nullptr)) {
9548 ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
9549 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9550 if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
9551 ThrowIllegalAccessErrorField(referring_class, target_field);
9552 return nullptr;
9553 }
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009554 if (UNLIKELY(is_put && target_field->IsFinal())) {
9555 ThrowIllegalAccessErrorField(referring_class, target_field);
9556 return nullptr;
9557 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009558 } else {
9559 DCHECK(Thread::Current()->IsExceptionPending());
9560 return nullptr;
9561 }
9562
9563 StackHandleScope<4> hs(self);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009564 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Orion Hodsonc069a302017-01-18 09:23:12 +00009565 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9566 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009567 if (UNLIKELY(method_params == nullptr)) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009568 DCHECK(self->IsExceptionPending());
9569 return nullptr;
9570 }
9571
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009572 Handle<mirror::Class> constructor_class;
Orion Hodsonc069a302017-01-18 09:23:12 +00009573 Handle<mirror::Class> return_type;
9574 switch (handle_type) {
9575 case DexFile::MethodHandleType::kStaticPut: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009576 method_params->Set(0, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009577 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009578 break;
9579 }
9580 case DexFile::MethodHandleType::kStaticGet: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009581 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009582 break;
9583 }
9584 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson631827d2017-04-10 14:53:47 +01009585 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009586 method_params->Set(1, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009587 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009588 break;
9589 }
9590 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodson631827d2017-04-10 14:53:47 +01009591 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009592 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009593 break;
9594 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009595 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson631827d2017-04-10 14:53:47 +01009596 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009597 case DexFile::MethodHandleType::kInvokeConstructor:
9598 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodson631827d2017-04-10 14:53:47 +01009599 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009600 UNREACHABLE();
9601 }
9602
9603 for (int32_t i = 0; i < num_params; ++i) {
9604 if (UNLIKELY(method_params->Get(i) == nullptr)) {
9605 DCHECK(self->IsExceptionPending());
9606 return nullptr;
Orion Hodsonc069a302017-01-18 09:23:12 +00009607 }
9608 }
9609
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009610 if (UNLIKELY(return_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009611 DCHECK(self->IsExceptionPending());
9612 return nullptr;
9613 }
9614
9615 Handle<mirror::MethodType>
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009616 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9617 if (UNLIKELY(method_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009618 DCHECK(self->IsExceptionPending());
9619 return nullptr;
9620 }
Orion Hodson631827d2017-04-10 14:53:47 +01009621
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009622 uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
9623 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9624}
9625
Vladimir Marko5aead702019-03-27 11:00:36 +00009626ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009627 Thread* self,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009628 const dex::MethodHandleItem& method_handle,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009629 ArtMethod* referrer) {
9630 DexFile::MethodHandleType handle_type =
9631 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9632 mirror::MethodHandle::Kind kind;
9633 uint32_t receiver_count = 0;
9634 ArtMethod* target_method = nullptr;
9635 switch (handle_type) {
9636 case DexFile::MethodHandleType::kStaticPut:
9637 case DexFile::MethodHandleType::kStaticGet:
9638 case DexFile::MethodHandleType::kInstancePut:
9639 case DexFile::MethodHandleType::kInstanceGet:
9640 UNREACHABLE();
9641 case DexFile::MethodHandleType::kInvokeStatic: {
9642 kind = mirror::MethodHandle::Kind::kInvokeStatic;
9643 receiver_count = 0;
Vladimir Markoba118822017-06-12 15:41:56 +01009644 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9645 method_handle.field_or_method_idx_,
9646 referrer,
9647 InvokeType::kStatic);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009648 break;
9649 }
9650 case DexFile::MethodHandleType::kInvokeInstance: {
9651 kind = mirror::MethodHandle::Kind::kInvokeVirtual;
9652 receiver_count = 1;
Vladimir Markoba118822017-06-12 15:41:56 +01009653 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9654 method_handle.field_or_method_idx_,
9655 referrer,
9656 InvokeType::kVirtual);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009657 break;
9658 }
9659 case DexFile::MethodHandleType::kInvokeConstructor: {
9660 // Constructors are currently implemented as a transform. They
9661 // are special cased later in this method.
9662 kind = mirror::MethodHandle::Kind::kInvokeTransform;
9663 receiver_count = 0;
Vladimir Markoba118822017-06-12 15:41:56 +01009664 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9665 method_handle.field_or_method_idx_,
9666 referrer,
9667 InvokeType::kDirect);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009668 break;
9669 }
9670 case DexFile::MethodHandleType::kInvokeDirect: {
9671 kind = mirror::MethodHandle::Kind::kInvokeDirect;
9672 receiver_count = 1;
9673 StackHandleScope<2> hs(self);
9674 // A constant method handle with type kInvokeDirect can refer to
9675 // a method that is private or to a method in a super class. To
9676 // disambiguate the two options, we resolve the method ignoring
9677 // the invocation type to determine if the method is private. We
9678 // then resolve again specifying the intended invocation type to
9679 // force the appropriate checks.
Vladimir Marko89011192017-12-11 13:45:05 +00009680 target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009681 hs.NewHandle(referrer->GetDexCache()),
9682 hs.NewHandle(referrer->GetClassLoader()));
9683 if (UNLIKELY(target_method == nullptr)) {
9684 break;
9685 }
9686
9687 if (target_method->IsPrivate()) {
9688 kind = mirror::MethodHandle::Kind::kInvokeDirect;
Vladimir Markoba118822017-06-12 15:41:56 +01009689 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9690 method_handle.field_or_method_idx_,
9691 referrer,
9692 InvokeType::kDirect);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009693 } else {
9694 kind = mirror::MethodHandle::Kind::kInvokeSuper;
Vladimir Markoba118822017-06-12 15:41:56 +01009695 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9696 method_handle.field_or_method_idx_,
9697 referrer,
9698 InvokeType::kSuper);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009699 if (UNLIKELY(target_method == nullptr)) {
9700 break;
9701 }
9702 // Find the method specified in the parent in referring class
9703 // so invoke-super invokes the method in the parent of the
9704 // referrer.
9705 target_method =
9706 referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
9707 kRuntimePointerSize);
9708 }
9709 break;
9710 }
9711 case DexFile::MethodHandleType::kInvokeInterface: {
9712 kind = mirror::MethodHandle::Kind::kInvokeInterface;
9713 receiver_count = 1;
Vladimir Markoba118822017-06-12 15:41:56 +01009714 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9715 method_handle.field_or_method_idx_,
9716 referrer,
9717 InvokeType::kInterface);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009718 break;
9719 }
Orion Hodson631827d2017-04-10 14:53:47 +01009720 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009721
9722 if (UNLIKELY(target_method == nullptr)) {
9723 DCHECK(Thread::Current()->IsExceptionPending());
9724 return nullptr;
9725 }
9726
9727 ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
9728 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9729 uint32_t access_flags = target_method->GetAccessFlags();
9730 if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
9731 ThrowIllegalAccessErrorMethod(referring_class, target_method);
9732 return nullptr;
9733 }
9734
9735 // Calculate the number of parameters from the method shorty. We add the
9736 // receiver count (0 or 1) and deduct one for the return value.
9737 uint32_t shorty_length;
9738 target_method->GetShorty(&shorty_length);
9739 int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);
9740
Orion Hodsonecd58562018-09-24 11:27:33 +01009741 StackHandleScope<5> hs(self);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009742 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009743 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9744 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
9745 if (method_params.Get() == nullptr) {
9746 DCHECK(self->IsExceptionPending());
9747 return nullptr;
9748 }
9749
Orion Hodsonecd58562018-09-24 11:27:33 +01009750 const DexFile* dex_file = referrer->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009751 const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009752 int32_t index = 0;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009753 if (receiver_count != 0) {
Orion Hodsonecd58562018-09-24 11:27:33 +01009754 // Insert receiver. Use the class identified in the method handle rather than the declaring
9755 // class of the resolved method which may be super class or default interface method
9756 // (b/115964401).
9757 ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
9758 // receiver_class should have been resolved when resolving the target method.
9759 DCHECK(receiver_class != nullptr);
9760 method_params->Set(index++, receiver_class);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009761 }
Orion Hodsonecd58562018-09-24 11:27:33 +01009762
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009763 const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
Orion Hodsonecd58562018-09-24 11:27:33 +01009764 DexFileParameterIterator it(*dex_file, proto_id);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009765 while (it.HasNext()) {
Orion Hodsonda1cdd02018-01-31 18:08:28 +00009766 DCHECK_LT(index, num_params);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009767 const dex::TypeIndex type_idx = it.GetTypeIdx();
Orion Hodsonecd58562018-09-24 11:27:33 +01009768 ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009769 if (nullptr == klass) {
9770 DCHECK(self->IsExceptionPending());
9771 return nullptr;
9772 }
9773 method_params->Set(index++, klass);
9774 it.Next();
9775 }
9776
Orion Hodsonecd58562018-09-24 11:27:33 +01009777 Handle<mirror::Class> return_type =
9778 hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009779 if (UNLIKELY(return_type.IsNull())) {
9780 DCHECK(self->IsExceptionPending());
9781 return nullptr;
9782 }
9783
9784 Handle<mirror::MethodType>
9785 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9786 if (UNLIKELY(method_type.IsNull())) {
9787 DCHECK(self->IsExceptionPending());
9788 return nullptr;
9789 }
9790
9791 if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
9792 Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
9793 Handle<mirror::MethodHandlesLookup> lookup =
9794 hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
9795 return lookup->FindConstructor(self, constructor_class, method_type);
9796 }
9797
9798 uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
9799 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9800}
9801
Vladimir Markoaf940202017-12-08 15:01:18 +00009802ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
9803 uint32_t method_handle_idx,
9804 ArtMethod* referrer)
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009805 REQUIRES_SHARED(Locks::mutator_lock_) {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009806 const DexFile* const dex_file = referrer->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009807 const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009808 switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
9809 case DexFile::MethodHandleType::kStaticPut:
9810 case DexFile::MethodHandleType::kStaticGet:
9811 case DexFile::MethodHandleType::kInstancePut:
9812 case DexFile::MethodHandleType::kInstanceGet:
9813 return ResolveMethodHandleForField(self, method_handle, referrer);
9814 case DexFile::MethodHandleType::kInvokeStatic:
9815 case DexFile::MethodHandleType::kInvokeInstance:
9816 case DexFile::MethodHandleType::kInvokeConstructor:
9817 case DexFile::MethodHandleType::kInvokeDirect:
9818 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonda1cdd02018-01-31 18:08:28 +00009819 return ResolveMethodHandleForMethod(self, method_handle, referrer);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009820 }
Orion Hodsonc069a302017-01-18 09:23:12 +00009821}
9822
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009823bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
9824 return (entry_point == GetQuickResolutionStub()) ||
9825 (quick_resolution_trampoline_ == entry_point);
9826}
9827
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009828bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
9829 return (entry_point == GetQuickToInterpreterBridge()) ||
9830 (quick_to_interpreter_bridge_trampoline_ == entry_point);
9831}
9832
9833bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
9834 return (entry_point == GetQuickGenericJniStub()) ||
9835 (quick_generic_jni_trampoline_ == entry_point);
9836}
9837
David Sehra49e0532017-08-25 08:05:29 -07009838bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
Vladimir Marko7dac8642019-11-06 17:09:30 +00009839 return entry_point == GetJniDlsymLookupStub() ||
9840 (jni_dlsym_lookup_trampoline_ == entry_point);
David Sehra49e0532017-08-25 08:05:29 -07009841}
9842
Vladimir Markofa458ac2020-02-12 14:08:07 +00009843bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
9844 return entry_point == GetJniDlsymLookupCriticalStub() ||
9845 (jni_dlsym_lookup_critical_trampoline_ == entry_point);
9846}
9847
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009848const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
9849 return GetQuickGenericJniStub();
9850}
9851
Alex Lightdb01a092017-04-03 15:39:55 -07009852void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
9853 DCHECK(method->IsObsolete());
9854 // We cannot mess with the entrypoints of native methods because they are used to determine how
9855 // large the method's quick stack frame is. Without this information we cannot walk the stacks.
9856 if (!method->IsNative()) {
9857 method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
9858 }
9859}
9860
Ian Rogers7dfb28c2013-08-22 08:18:36 -07009861void ClassLinker::DumpForSigQuit(std::ostream& os) {
Hans Boehmfb3ad722021-08-16 16:53:17 +00009862 ScopedObjectAccess soa(Thread::Current());
9863 ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009864 os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
9865 << NumNonZygoteClasses() << "\n";
Hans Boehmfb3ad722021-08-16 16:53:17 +00009866 ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
Nicolas Geoffraya90c9222018-09-07 13:19:19 +01009867 os << "Dumping registered class loaders\n";
9868 size_t class_loader_index = 0;
9869 for (const ClassLoaderData& class_loader : class_loaders_) {
9870 ObjPtr<mirror::ClassLoader> loader =
Hans Boehmfb3ad722021-08-16 16:53:17 +00009871 ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
Nicolas Geoffraya90c9222018-09-07 13:19:19 +01009872 if (loader != nullptr) {
9873 os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
9874 bool saw_one_dex_file = false;
David Srbecky6fbcc292021-02-23 01:05:32 +00009875 for (const auto& entry : dex_caches_) {
9876 const DexCacheData& dex_cache = entry.second;
9877 if (dex_cache.class_table == class_loader.class_table) {
Nicolas Geoffraya90c9222018-09-07 13:19:19 +01009878 if (saw_one_dex_file) {
9879 os << ":";
9880 }
9881 saw_one_dex_file = true;
David Srbecky6fbcc292021-02-23 01:05:32 +00009882 os << entry.first->GetLocation();
Nicolas Geoffraya90c9222018-09-07 13:19:19 +01009883 }
9884 }
9885 os << "]";
9886 bool found_parent = false;
9887 if (loader->GetParent() != nullptr) {
9888 size_t parent_index = 0;
9889 for (const ClassLoaderData& class_loader2 : class_loaders_) {
9890 ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
Hans Boehmfb3ad722021-08-16 16:53:17 +00009891 soa.Self()->DecodeJObject(class_loader2.weak_root));
Nicolas Geoffraya90c9222018-09-07 13:19:19 +01009892 if (loader2 == loader->GetParent()) {
9893 os << ", parent #" << parent_index;
9894 found_parent = true;
9895 break;
9896 }
9897 parent_index++;
9898 }
9899 if (!found_parent) {
9900 os << ", unregistered parent of type "
9901 << loader->GetParent()->GetClass()->PrettyDescriptor();
9902 }
9903 } else {
9904 os << ", no parent";
9905 }
9906 os << "\n";
9907 }
9908 }
9909 os << "Done dumping class loaders\n";
Andreas Gampe9b7f8b52019-06-07 08:59:29 -07009910 Runtime* runtime = Runtime::Current();
9911 os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
9912 << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009913}
9914
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009915class CountClassesVisitor : public ClassLoaderVisitor {
9916 public:
9917 CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
9918
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009919 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01009920 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009921 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -07009922 if (class_table != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00009923 num_zygote_classes += class_table->NumZygoteClasses(class_loader);
9924 num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
Mathieu Chartier6b069532015-08-05 15:08:12 -07009925 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009926 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009927
9928 size_t num_zygote_classes;
9929 size_t num_non_zygote_classes;
9930};
9931
9932size_t ClassLinker::NumZygoteClasses() const {
9933 CountClassesVisitor visitor;
9934 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -07009935 return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009936}
9937
9938size_t ClassLinker::NumNonZygoteClasses() const {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009939 CountClassesVisitor visitor;
9940 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -07009941 return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
Elliott Hughescac6cc72011-11-03 20:31:21 -07009942}
9943
Ian Rogers7dfb28c2013-08-22 08:18:36 -07009944size_t ClassLinker::NumLoadedClasses() {
Ian Rogers1bf8d4d2013-05-30 00:18:49 -07009945 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Mathieu Chartierc2e20622014-11-03 11:41:47 -08009946 // Only return non zygote classes since these are the ones which apps which care about.
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009947 return NumNonZygoteClasses();
Elliott Hughese27955c2011-08-26 15:21:24 -07009948}
9949
Brian Carlstrom47d237a2011-10-18 15:08:33 -07009950pid_t ClassLinker::GetClassesLockOwner() {
Ian Rogersb726dcb2012-09-05 08:57:23 -07009951 return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
Brian Carlstrom47d237a2011-10-18 15:08:33 -07009952}
9953
9954pid_t ClassLinker::GetDexLockOwner() {
Andreas Gampecc1b5352016-12-01 16:58:38 -08009955 return Locks::dex_lock_->GetExclusiveOwnerTid();
Brian Carlstrom24a3c2e2011-10-17 18:07:52 -07009956}
9957
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009958void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08009959 DCHECK(!init_done_);
9960
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009961 DCHECK(klass != nullptr);
9962 DCHECK(klass->GetClassLoader() == nullptr);
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08009963
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -07009964 mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009965 DCHECK(class_roots != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01009966 DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
9967 int32_t index = static_cast<int32_t>(class_root);
9968 DCHECK(class_roots->Get(index) == nullptr);
9969 class_roots->Set<false>(index, klass);
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009970}
9971
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009972ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
9973 Thread* self,
9974 const std::vector<const DexFile*>& dex_files,
9975 Handle<mirror::Class> loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +00009976 Handle<mirror::ClassLoader> parent_loader,
Brad Stenning9c924e82021-10-11 19:09:00 -07009977 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
9978 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {
Calin Juravle7865ac72017-06-28 11:03:12 -07009979
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009980 StackHandleScope<5> hs(self);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009981
Mathieu Chartierc7853442015-03-27 14:35:38 -07009982 ArtField* dex_elements_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009983 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009984
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009985 Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009986 DCHECK(dex_elements_class != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009987 DCHECK(dex_elements_class->IsArrayClass());
Mathieu Chartierdaaf3262015-03-24 13:30:28 -07009988 Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
Mathieu Chartier3398c782016-09-30 10:27:43 -07009989 mirror::ObjectArray<mirror::Object>::Alloc(self,
9990 dex_elements_class.Get(),
9991 dex_files.size())));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009992 Handle<mirror::Class> h_dex_element_class =
9993 hs.NewHandle(dex_elements_class->GetComponentType());
9994
Mathieu Chartierc7853442015-03-27 14:35:38 -07009995 ArtField* element_file_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009996 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009997 DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009998
Andreas Gampe08883de2016-11-08 13:20:52 -08009999 ArtField* cookie_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
Vladimir Marko208f6702017-12-08 12:00:50 +000010000 DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010001
Andreas Gampe08883de2016-11-08 13:20:52 -080010002 ArtField* file_name_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
Vladimir Marko208f6702017-12-08 12:00:50 +000010003 DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
Mathieu Chartierfbc31082016-01-24 11:59:56 -080010004
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010005 // Fill the elements array.
10006 int32_t index = 0;
10007 for (const DexFile* dex_file : dex_files) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -080010008 StackHandleScope<4> hs2(self);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010009
Calin Juravle7865ac72017-06-28 11:03:12 -070010010 // CreateWellKnownClassLoader is only used by gtests and compiler.
10011 // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
Mathieu Chartiere58991b2015-10-13 07:59:34 -070010012 Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
10013 self,
10014 kDexFileIndexStart + 1));
Andreas Gampefa4333d2017-02-14 11:10:34 -080010015 DCHECK(h_long_array != nullptr);
Vladimir Marko78baed52018-10-11 10:44:58 +010010016 h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010017
Mathieu Chartier3738e982017-05-12 16:07:28 -070010018 // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
10019 // FinalizerReference which will never get cleaned up without a started runtime.
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010020 Handle<mirror::Object> h_dex_file = hs2.NewHandle(
Mathieu Chartierc7853442015-03-27 14:35:38 -070010021 cookie_field->GetDeclaringClass()->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -080010022 DCHECK(h_dex_file != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -070010023 cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010024
Mathieu Chartierfbc31082016-01-24 11:59:56 -080010025 Handle<mirror::String> h_file_name = hs2.NewHandle(
10026 mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
Andreas Gampefa4333d2017-02-14 11:10:34 -080010027 DCHECK(h_file_name != nullptr);
Mathieu Chartierfbc31082016-01-24 11:59:56 -080010028 file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
10029
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010030 Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -080010031 DCHECK(h_element != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -070010032 element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010033
10034 h_dex_elements->Set(index, h_element.Get());
10035 index++;
10036 }
10037 DCHECK_EQ(index, h_dex_elements->GetLength());
10038
10039 // Create DexPathList.
10040 Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
Mathieu Chartierc7853442015-03-27 14:35:38 -070010041 dex_elements_field->GetDeclaringClass()->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -080010042 DCHECK(h_dex_path_list != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010043 // Set elements.
Mathieu Chartierc7853442015-03-27 14:35:38 -070010044 dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
Andreas Gampe473191c2017-12-28 16:55:31 -080010045 // Create an empty List for the "nativeLibraryDirectories," required for native tests.
10046 // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
10047 // elements.
10048 {
10049 ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
10050 FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
10051 DCHECK(native_lib_dirs != nullptr);
10052 ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
10053 DCHECK(list_class != nullptr);
10054 {
10055 StackHandleScope<1> h_list_scope(self);
10056 Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
10057 bool list_init = EnsureInitialized(self, h_list_class, true, true);
10058 DCHECK(list_init);
10059 list_class = h_list_class.Get();
10060 }
10061 ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
10062 // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
10063 // is fine for testing. While it violates a Java-code invariant (the elementData field is
10064 // normally never null), as long as one does not try to add elements, this will still
10065 // work.
10066 native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
10067 }
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010068
Calin Juravle7865ac72017-06-28 11:03:12 -070010069 // Create the class loader..
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010070 Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
10071 ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
Calin Juravle7865ac72017-06-28 11:03:12 -070010072 DCHECK(h_class_loader != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010073 // Set DexPathList.
Mathieu Chartierc7853442015-03-27 14:35:38 -070010074 ArtField* path_list_field =
Andreas Gampe08883de2016-11-08 13:20:52 -080010075 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
Mathieu Chartierc7853442015-03-27 14:35:38 -070010076 DCHECK(path_list_field != nullptr);
Calin Juravle7865ac72017-06-28 11:03:12 -070010077 path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010078
10079 // Make a pretend boot-classpath.
10080 // TODO: Should we scan the image?
Mathieu Chartierc7853442015-03-27 14:35:38 -070010081 ArtField* const parent_field =
Vladimir Markoe300c4e2021-06-08 16:00:05 +010010082 jni::DecodeArtField(WellKnownClasses::java_lang_ClassLoader_parent);
Roland Levillainf39c9eb2015-05-26 15:02:07 +010010083 DCHECK(parent_field != nullptr);
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010084 if (parent_loader.Get() == nullptr) {
10085 ScopedObjectAccessUnchecked soa(self);
10086 ObjPtr<mirror::Object> boot_loader(soa.Decode<mirror::Class>(
10087 WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self));
10088 parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
10089 } else {
10090 parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
10091 }
Calin Juravle7865ac72017-06-28 11:03:12 -070010092
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010093 ArtField* shared_libraries_field =
10094 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
10095 DCHECK(shared_libraries_field != nullptr);
10096 shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
10097
Brad Stenning9c924e82021-10-11 19:09:00 -070010098 ArtField* shared_libraries_after_field =
10099 jni::DecodeArtField(
10100 WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter);
10101 DCHECK(shared_libraries_after_field != nullptr);
10102 shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
10103 shared_libraries_after.Get());
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010104 return h_class_loader.Get();
10105}
10106
10107jobject ClassLinker::CreateWellKnownClassLoader(Thread* self,
10108 const std::vector<const DexFile*>& dex_files,
10109 jclass loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +000010110 jobject parent_loader,
Brad Stenning9c924e82021-10-11 19:09:00 -070010111 jobject shared_libraries,
10112 jobject shared_libraries_after) {
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010113 CHECK(self->GetJniEnv()->IsSameObject(loader_class,
10114 WellKnownClasses::dalvik_system_PathClassLoader) ||
10115 self->GetJniEnv()->IsSameObject(loader_class,
David Brazdil1a9ac532019-03-05 11:57:13 +000010116 WellKnownClasses::dalvik_system_DelegateLastClassLoader) ||
10117 self->GetJniEnv()->IsSameObject(loader_class,
10118 WellKnownClasses::dalvik_system_InMemoryDexClassLoader));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010119
10120 // SOAAlreadyRunnable is protected, and we need something to add a global reference.
10121 // We could move the jobject to the callers, but all call-sites do this...
10122 ScopedObjectAccessUnchecked soa(self);
10123
10124 // For now, create a libcore-level DexFile for each ART DexFile. This "explodes" multidex.
Brad Stenning9c924e82021-10-11 19:09:00 -070010125 StackHandleScope<5> hs(self);
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010126
10127 Handle<mirror::Class> h_loader_class =
10128 hs.NewHandle<mirror::Class>(soa.Decode<mirror::Class>(loader_class));
Nicolas Geoffraye1672732018-11-30 01:09:49 +000010129 Handle<mirror::ClassLoader> h_parent =
10130 hs.NewHandle<mirror::ClassLoader>(soa.Decode<mirror::ClassLoader>(parent_loader));
10131 Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries =
10132 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries));
Brad Stenning9c924e82021-10-11 19:09:00 -070010133 Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries_after =
10134 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries_after));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010135
10136 ObjPtr<mirror::ClassLoader> loader = CreateWellKnownClassLoader(
10137 self,
10138 dex_files,
10139 h_loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +000010140 h_parent,
Brad Stenning9c924e82021-10-11 19:09:00 -070010141 h_shared_libraries,
10142 h_shared_libraries_after);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010143
10144 // Make it a global ref and return.
10145 ScopedLocalRef<jobject> local_ref(
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010146 soa.Env(), soa.Env()->AddLocalReference<jobject>(loader));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010147 return soa.Env()->NewGlobalRef(local_ref.get());
10148}
10149
Calin Juravle7865ac72017-06-28 11:03:12 -070010150jobject ClassLinker::CreatePathClassLoader(Thread* self,
10151 const std::vector<const DexFile*>& dex_files) {
10152 return CreateWellKnownClassLoader(self,
10153 dex_files,
10154 WellKnownClasses::dalvik_system_PathClassLoader,
10155 nullptr);
10156}
10157
Andreas Gampe8ac75952015-06-02 21:01:45 -070010158void ClassLinker::DropFindArrayClassCache() {
10159 std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
10160 find_array_class_cache_next_victim_ = 0;
10161}
10162
Mathieu Chartier951ec2c2015-09-22 08:50:05 -070010163void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010164 Thread* const self = Thread::Current();
Mathieu Chartier951ec2c2015-09-22 08:50:05 -070010165 for (const ClassLoaderData& data : class_loaders_) {
Mathieu Chartier4843bd52015-10-01 17:08:44 -070010166 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
Mathieu Chartierc4f39252016-10-05 18:32:08 -070010167 ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
10168 self->DecodeJObject(data.weak_root));
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010169 if (class_loader != nullptr) {
Vladimir Markod93e3742018-07-18 10:58:13 +010010170 visitor->Visit(class_loader);
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010171 }
10172 }
10173}
10174
Alexey Grebenkin252a4e42018-04-02 18:18:01 +030010175void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
10176 for (const ClassLoaderData& data : class_loaders_) {
10177 LinearAlloc* alloc = data.allocator;
10178 if (alloc != nullptr && !visitor->Visit(alloc)) {
10179 break;
10180 }
10181 }
10182}
10183
Mathieu Chartierbc5a7952016-10-17 15:46:31 -070010184void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
10185 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier00310e02015-10-17 12:46:42 -070010186 DCHECK(dex_file != nullptr);
Mathieu Chartier00310e02015-10-17 12:46:42 -070010187 Thread* const self = Thread::Current();
10188 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Markobcf17522018-06-01 13:14:32 +010010189 ClassTable* const table = ClassTableForClassLoader(class_loader);
Mathieu Chartier00310e02015-10-17 12:46:42 -070010190 DCHECK(table != nullptr);
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -070010191 if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
Mathieu Chartier00310e02015-10-17 12:46:42 -070010192 // It was not already inserted, perform the write barrier to let the GC know the class loader's
10193 // class table was modified.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -070010194 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier00310e02015-10-17 12:46:42 -070010195 }
10196}
10197
Mathieu Chartier951ec2c2015-09-22 08:50:05 -070010198void ClassLinker::CleanupClassLoaders() {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010199 Thread* const self = Thread::Current();
Mathieu Chartier65975772016-08-05 10:46:36 -070010200 std::vector<ClassLoaderData> to_delete;
10201 // Do the delete outside the lock to avoid lock violation in jit code cache.
10202 {
10203 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
10204 for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
10205 const ClassLoaderData& data = *it;
10206 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
Mathieu Chartierc4f39252016-10-05 18:32:08 -070010207 ObjPtr<mirror::ClassLoader> class_loader =
10208 ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
Mathieu Chartier65975772016-08-05 10:46:36 -070010209 if (class_loader != nullptr) {
10210 ++it;
10211 } else {
10212 VLOG(class_linker) << "Freeing class loader";
10213 to_delete.push_back(data);
10214 it = class_loaders_.erase(it);
10215 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010216 }
10217 }
Mathieu Chartier65975772016-08-05 10:46:36 -070010218 for (ClassLoaderData& data : to_delete) {
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +030010219 // CHA unloading analysis and SingleImplementaion cleanups are required.
Andreas Gampe98ea9d92018-10-19 14:06:15 -070010220 DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
Mathieu Chartier65975772016-08-05 10:46:36 -070010221 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010222}
10223
Mathieu Chartier65975772016-08-05 10:46:36 -070010224class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
10225 public:
10226 FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
10227 : method_(method),
10228 pointer_size_(pointer_size) {}
10229
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010010230 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
Mathieu Chartier65975772016-08-05 10:46:36 -070010231 if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
10232 holder_ = klass;
10233 }
10234 // Return false to stop searching if holder_ is not null.
10235 return holder_ == nullptr;
10236 }
10237
Mathieu Chartier28357fa2016-10-18 16:27:40 -070010238 ObjPtr<mirror::Class> holder_ = nullptr;
Mathieu Chartier65975772016-08-05 10:46:36 -070010239 const ArtMethod* const method_;
10240 const PointerSize pointer_size_;
10241};
10242
Vladimir Markoa8bba7d2018-05-30 15:18:48 +010010243ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
Mathieu Chartier65975772016-08-05 10:46:36 -070010244 ScopedTrace trace(__FUNCTION__); // Since this function is slow, have a trace to notify people.
10245 CHECK(method->IsCopied());
10246 FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
10247 VisitClasses(&visitor);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +010010248 return visitor.holder_;
Mathieu Chartier65975772016-08-05 10:46:36 -070010249}
10250
// Whether access to |art_method| must be denied based on public-SDK restrictions.
// The base ClassLinker performs no such checks; only the AotClassLinker override implements
// this, so reaching this definition is a logic error and aborts.
bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10257
// Whether access to |art_field| must be denied based on public-SDK restrictions.
// The base ClassLinker performs no such checks; only the AotClassLinker override implements
// this, so reaching this definition is a logic error and aborts.
bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10264
// Whether access to the type named by |type_descriptor| must be denied based on public-SDK
// restrictions. The base ClassLinker performs no such checks; only the AotClassLinker override
// implements this, so reaching this definition is a logic error and aborts.
bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10270
// Toggles public-SDK access checking. The base ClassLinker does not support these checks;
// only the AotClassLinker override implements this, so reaching this definition is a logic
// error and aborts.
void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10276
// Explicitly instantiate ClassLinker::AllocClass for both values of the kMovable template
// parameter so the definitions are emitted in this translation unit and available to callers
// elsewhere.
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
10286
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070010287} // namespace art