/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "class_linker.h"

#include <unistd.h>

#include <algorithm>
#include <deque>
#include <forward_list>
#include <iostream>
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <string_view>
#include <tuple>
#include <utility>
#include <vector>

#include "android-base/stringprintf.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "barrier.h"
#include "base/arena_allocator.h"
#include "base/casts.h"
#include "base/file_utils.h"
#include "base/hash_map.h"
#include "base/hash_set.h"
#include "base/leb128.h"
#include "base/logging.h"
#include "base/metrics/metrics.h"
#include "base/mutex-inl.h"
#include "base/os.h"
#include "base/quasi_atomic.h"
#include "base/scoped_arena_containers.h"
#include "base/scoped_flock.h"
#include "base/stl_util.h"
#include "base/string_view_cpp20.h"
#include "base/systrace.h"
#include "base/time_utils.h"
#include "base/unix_file/fd_file.h"
#include "base/utils.h"
#include "base/value_object.h"
#include "cha.h"
#include "class_linker-inl.h"
#include "class_loader_utils.h"
#include "class_root-inl.h"
#include "class_table-inl.h"
#include "compiler_callbacks.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/class_accessor-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_exception_helpers.h"
#include "dex/dex_file_loader.h"
#include "dex/signature-inl.h"
#include "dex/utf.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "experimental_flags.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "gc_root-inl.h"
#include "handle_scope-inl.h"
#include "hidden_api.h"
#include "image-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "intern_table-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/mterp/nterp.h"
#include "jit/debugger_interface.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni/java_vm_ext.h"
#include "jni/jni_internal.h"
#include "linear_alloc.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/array-inl.h"
#include "mirror/call_site.h"
#include "mirror/class-alloc-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/field.h"
#include "mirror/iftable-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/method_handles_lookup.h"
#include "mirror/method_type.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_reference.h"
#include "mirror/object_reference-inl.h"
#include "mirror/proxy.h"
#include "mirror/reference-inl.h"
#include "mirror/stack_trace_element.h"
#include "mirror/string-inl.h"
#include "mirror/throwable.h"
#include "mirror/var_handle.h"
#include "native/dalvik_system_DexFile.h"
#include "nativehelper/scoped_local_ref.h"
#include "nterp_helpers.h"
#include "oat.h"
#include "oat_file-inl.h"
#include "oat_file.h"
#include "oat_file_assistant.h"
#include "oat_file_manager.h"
#include "object_lock.h"
#include "profile/profile_compilation_info.h"
#include "runtime.h"
#include "runtime_callbacks.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread.h"
#include "thread_list.h"
#include "trace.h"
#include "transaction.h"
#include "vdex_file.h"
#include "verifier/class_verifier.h"
#include "verifier/verifier_deps.h"
#include "well_known_classes.h"

#include "interpreter/interpreter_mterp_impl.h"

namespace art {

using android::base::StringPrintf;

static constexpr bool kCheckImageObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;

static void ThrowNoClassDefFoundError(const char* fmt, ...)
    __attribute__((__format__(__printf__, 1, 2)))
    REQUIRES_SHARED(Locks::mutator_lock_);
static void ThrowNoClassDefFoundError(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  Thread* self = Thread::Current();
  self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
  va_end(args);
}

static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ClassExt> ext(c->GetExtData());
  if (ext == nullptr) {
    return nullptr;
  } else {
    return ext->GetErroneousStateError();
  }
}

static bool IsVerifyError(ObjPtr<mirror::Object> obj)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This is slow, but we only use it for rethrowing an error, and for DCHECK.
  return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
}

// Helper for ThrowEarlierClassFailure. Throws the stored error.
static void HandleEarlierErroneousStateError(Thread* self,
                                             ClassLinker* class_linker,
                                             ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
  DCHECK(obj != nullptr);
  self->AssertNoPendingException();
  DCHECK(!obj->IsClass());
  ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
  ObjPtr<mirror::Class> error_class = obj->GetClass();
  CHECK(throwable_class->IsAssignableFrom(error_class));
  self->SetException(obj->AsThrowable());
  self->AssertPendingException();
}

static void ChangeInterpreterBridgeToNterp(ArtMethod* method, ClassLinker* class_linker)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  if (class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()) &&
      CanMethodUseNterp(method)) {
    if (method->GetDeclaringClass()->IsVisiblyInitialized() ||
        !NeedsClinitCheckBeforeCall(method)) {
      runtime->GetInstrumentation()->UpdateMethodsCode(method, interpreter::GetNterpEntryPoint());
    } else {
      // Put the resolution stub, which will initialize the class and then
      // call the method with nterp.
      runtime->GetInstrumentation()->UpdateMethodsCode(method, GetQuickResolutionStub());
    }
  }
}

// Ensures that methods have the kAccSkipAccessChecks bit set. We use the
// kAccVerificationAttempted bit on the class access flags to determine whether this has been done
// before.
static void EnsureSkipAccessChecksMethods(Handle<mirror::Class> klass, PointerSize pointer_size)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  if (!klass->WasVerificationAttempted()) {
    klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
    klass->SetVerificationAttempted();
    // Now that the class has passed verification, try to set nterp entrypoints
    // to methods that currently use the switch interpreter.
    if (interpreter::CanRuntimeUseNterp()) {
      for (ArtMethod& m : klass->GetMethods(pointer_size)) {
        ChangeInterpreterBridgeToNterp(&m, class_linker);
      }
    }
  }
}
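// Illustrative usage sketch (hypothetical caller, not a call site shown in this excerpt):
// after a class passes verification, a caller holding a Handle<mirror::Class> would
// typically invoke
//   EnsureSkipAccessChecksMethods(klass, class_linker->GetImagePointerSize());
// so that the verified methods skip per-instruction access checks and, when the runtime
// can use nterp, switch from the switch interpreter to the nterp entrypoint.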

// Callback responsible for making a batch of classes visibly initialized
// after all threads have called it from a checkpoint, ensuring visibility.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  void MakeVisible(Thread* self) {
    DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
    size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
    AdjustThreadVisibilityCounter(self, count);
  }

  void Run(Thread* self) override {
    self->ClearMakeVisiblyInitializedCounter();
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      {
        ScopedObjectAccess soa(self);
        StackHandleScope<1u> hs(self);
        MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
        JavaVMExt* vm = self->GetJniEnv()->GetVm();
        for (size_t i = 0, num = num_classes_; i != num; ++i) {
          klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
          vm->DeleteWeakGlobalRef(self, classes_[i]);
          if (klass != nullptr) {
            mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
            class_linker_->FixupStaticTrampolines(self, klass.Get());
          }
        }
        num_classes_ = 0u;
      }
      class_linker_->VisiblyInitializedCallbackDone(self, this);
    }
  }

  static constexpr size_t kMaxClasses = 16;

  ClassLinker* const class_linker_;
  size_t num_classes_;
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};
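// Lifecycle sketch of the callback above, based on the code below: MarkClassInitialized()
// batches up to kMaxClasses newly initialized classes in visibly_initialized_callback_.
// When the batch is full, or when MakeInitializedClassesVisiblyInitialized() flushes a
// partial batch, the callback moves to running_visibly_initialized_callbacks_ and
// MakeVisible() runs it as a checkpoint on all threads. The last thread to pass the
// checkpoint promotes the batched classes to ClassStatus::kVisiblyInitialized and calls
// VisiblyInitializedCallbackDone(), which signals any waiting barriers and then reuses
// or deletes the callback object.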

void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model, classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}

void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}

void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
  ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
  if (cb != nullptr) {
    cb->MakeVisible(self);
  }
  ScopedThreadSuspension sts(self, ThreadState::kSuspended);
  MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
}

ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}
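// Caller contract (ForceClassInitialized() above is a concrete example): a non-null
// return value must be made visible by the caller, roughly:
//   VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
//   if (cb != nullptr) {
//     cb->MakeVisible(self);
//   }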

const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}
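// Note on @CriticalNative registration: the JNI entrypoint is installed immediately only
// if the declaring class is already visibly initialized; otherwise the mapping is parked
// in critical_native_code_with_clinit_check_ and is expected to be installed later, once
// the class reaches ClassStatus::kVisiblyInitialized and FixupStaticTrampolines() runs
// (see MarkClassInitialized() above).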

void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  // Restore stub to lookup native pointer via dlsym.
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
  } else {
    method->SetEntryPointFromJni(GetJniDlsymLookupStub());
  }
}

const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      return it->second;
    }
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
  } else {
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
  }
}
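// A nullptr result from GetRegisteredNative() means "not registered": the method's JNI
// entrypoint is still one of the dlsym lookup stubs, so the native implementation would
// be resolved lazily via dlsym on the first call.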

void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}
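// Behavior sketch: a class that previously failed verification has its stored VerifyError
// rethrown as-is, while a class that failed in some other way (typically initialization)
// gets a fresh NoClassDefFoundError with any stored error attached as the cause, so later
// resolution attempts fail consistently without re-running verification or <clinit>.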

static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (VLOG_IS_ON(class_linker)) {
    std::string temp;
    LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
              << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
  }
}

static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();
  JNIEnv* env = self->GetJniEnv();

  ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
  CHECK(cause.get() != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need to
    // make sure to only do it if we don't have AsyncExceptions being thrown around since those
    // could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  env->ExceptionClear();
  bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
  env->Throw(cause.get());

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!is_error) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}
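// Example (illustrative): if a static initializer throws ArithmeticException, callers of
// this helper end up with an ExceptionInInitializerError caused by that ArithmeticException
// pending on the thread; if the initializer throws an Error subclass (e.g. OutOfMemoryError),
// that Error is rethrown unwrapped, matching the JLS rules for <clinit> failures.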

ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      nterp_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // For CHA disabled during Aot, see b/34193647.

  CHECK(intern_table_ != nullptr);
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
}

void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
  ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
  if (c2 == nullptr) {
    LOG(FATAL) << "Could not find class " << descriptor;
    UNREACHABLE();
  }
  if (c1.Get() != c2) {
    std::ostringstream os1, os2;
    c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
    c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
    LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
               << ". This is most likely the result of a broken build. Make sure that "
               << "libcore and art projects match.\n\n"
               << os1.str() << "\n\n" << os2.str();
    UNREACHABLE();
  }
}

bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
                                   std::string* error_msg) {
  VLOG(startup) << "ClassLinker::Init";

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
  CHECK(!init_done_);

  // Use the pointer size from the runtime since we are probably creating the image.
  image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());

  // java_lang_Class comes first, it's needed for AllocClass
  // The GC can't handle an object with a null class since we can't get the size of this object.
  heap->IncrementDisableMovingGC(self);
  StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
  auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
  // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
  // the incorrect result when comparing to-space vs from-space.
  Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
      heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
  CHECK(java_lang_Class != nullptr);
  java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
  java_lang_Class->SetClass(java_lang_Class.Get());
  if (kUseBakerReadBarrier) {
    java_lang_Class->AssertReadBarrierState();
  }
  java_lang_Class->SetClassSize(class_class_size);
  java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
  heap->DecrementDisableMovingGC(self);
  // AllocClass(ObjPtr<mirror::Class>) can now be used

  // Class[] is used for reflection support.
  auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
  Handle<mirror::Class> class_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
  class_array_class->SetComponentType(java_lang_Class.Get());

  // java_lang_Object comes next so that object_array_class can be created.
  Handle<mirror::Class> java_lang_Object(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
  CHECK(java_lang_Object != nullptr);
  // backfill Object as the super class of Class.
  java_lang_Class->SetSuperClass(java_lang_Object.Get());
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);

  java_lang_Object->SetObjectSize(sizeof(mirror::Object));
  // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
  // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
  runtime->SetSentinel(heap->AllocNonMovableObject(self,
                                                   java_lang_Object.Get(),
                                                   java_lang_Object->GetObjectSize(),
                                                   VoidFunctor()));

  // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
  if (kBitstringSubtypeCheckEnabled) {
    // It might seem the lock here is unnecessary, however all the SubtypeCheck
    // functions are annotated to require locks all the way down.
    //
    // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
  }
697
Ian Rogers23435d02012-09-24 11:23:12 -0700698 // Object[] next to hold class roots.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700699 Handle<mirror::Class> object_array_class(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700700 AllocClass(self, java_lang_Class.Get(),
701 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700702 object_array_class->SetComponentType(java_lang_Object.Get());
Brian Carlstroma0808032011-07-18 00:39:23 -0700703
Roland Levillain0e840272018-08-23 19:55:30 +0100704 // Setup java.lang.String.
705 //
706 // We make this class non-movable for the unlikely case where it were to be
707 // moved by a sticky-bit (minor) collection when using the Generational
708 // Concurrent Copying (CC) collector, potentially creating a stale reference
709 // in the `klass_` field of one of its instances allocated in the Large-Object
710 // Space (LOS) -- see the comment about the dirty card scanning logic in
711 // art::gc::collector::ConcurrentCopying::MarkingPhase.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700712 Handle<mirror::Class> java_lang_String(hs.NewHandle(
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700713 AllocClass</* kMovable= */ false>(
Roland Levillain0e840272018-08-23 19:55:30 +0100714 self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700715 java_lang_String->SetStringClass();
Vladimir Marko2c64a832018-01-04 11:31:56 +0000716 mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);
Jesse Wilson14150742011-07-29 19:04:44 -0400717
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700718 // Setup java.lang.ref.Reference.
Fred Shih4ee7a662014-07-11 09:59:27 -0700719 Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700720 AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
Fred Shih4ee7a662014-07-11 09:59:27 -0700721 java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000722 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);
Fred Shih4ee7a662014-07-11 09:59:27 -0700723
Ian Rogers23435d02012-09-24 11:23:12 -0700724 // Create storage for root classes, save away our work so far (requires descriptors).
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700725 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100726 mirror::ObjectArray<mirror::Class>::Alloc(self,
727 object_array_class.Get(),
728 static_cast<int32_t>(ClassRoot::kMax)));
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700729 CHECK(!class_roots_.IsNull());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100730 SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
731 SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
732 SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
733 SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100734 SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
735 SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700736
Mathieu Chartier6beced42016-11-15 15:51:31 -0800737 // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
738 java_lang_Object->SetIfTable(AllocIfTable(self, 0));
739
Vladimir Marko02610552018-06-04 14:38:00 +0100740 // Create array interface entries to populate once we can load system classes.
741 object_array_class->SetIfTable(AllocIfTable(self, 2));
742 DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());
743
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700744 // Setup the primitive type classes.
Vladimir Marko70e2a762019-07-12 16:49:00 +0100745 CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
746 CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
747 CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
748 CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
749 CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
750 CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
751 CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
752 CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
753 CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700754
Vladimir Marko70e2a762019-07-12 16:49:00 +0100755 // Allocate the primitive array classes. We need only the native pointer
756 // array at this point (int[] or long[], depending on architecture) but
757 // we shall perform the same setup steps for all primitive array classes.
758 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
759 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
760 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
761 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
762 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
763 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
764 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
765 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700766

  // now that these are registered, we can use AllocClass() and AllocObjectArray

  // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
  Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
  java_lang_DexCache->SetDexCacheClass();
  java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);

  // Setup dalvik.system.ClassExt
  Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);

  // Set up array classes for string, field, method
  Handle<mirror::Class> object_array_string(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
  object_array_string->SetComponentType(java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());

  LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
  // Create runtime resolution and imt conflict methods.
  runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
  runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
  runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));

  // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
  // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
  // these roots.
  if (boot_class_path.empty()) {
    *error_msg = "Boot classpath is empty.";
    return false;
  }
  for (auto& dex_file : boot_class_path) {
    if (dex_file == nullptr) {
      *error_msg = "Null dex file.";
      return false;
    }
    AppendToBootClassPath(self, dex_file.get());
    boot_dex_files_.push_back(std::move(dex_file));
  }

  // now we can use FindSystemClass

  // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
  // we do not need friend classes or a publicly exposed setter.
  quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
  if (!runtime->IsAotCompiler()) {
    // We need to set up the generic trampolines since we don't have an image.
    jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
    jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
    quick_resolution_trampoline_ = GetQuickResolutionStub();
    quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
    quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
    quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
    nterp_trampoline_ = interpreter::GetNterpEntryPoint();
  }

  // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
  CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
  CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
  CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
  CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());

  // Run Class through FindSystemClass. This initializes the dex_cache_ fields and registers it
  // in class_table_.
  CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
Elliott Hughes418d20f2011-09-22 14:00:39 -0700845
Vladimir Marko70e2a762019-07-12 16:49:00 +0100846 // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
847 // arrays - can't be done until Object has a vtable and component classes are loaded.
848 FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
849 FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
850 FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
851 FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
852 FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
853 FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
854 FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
855 FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
856 FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
857 FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
858 FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700859
Ian Rogers23435d02012-09-24 11:23:12 -0700860 // Setup the single, global copy of "iftable".
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700861 auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800862 CHECK(java_lang_Cloneable != nullptr);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700863 auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800864 CHECK(java_io_Serializable != nullptr);
Ian Rogers23435d02012-09-24 11:23:12 -0700865 // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
866 // crawl up and explicitly list all of the supers as well.
Vladimir Marko02610552018-06-04 14:38:00 +0100867 object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
868 object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700869
Vladimir Markob10668c2021-06-10 09:52:53 +0100870 // Check Class[] and Object[]'s interfaces.
871 CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
872 CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
873 CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
874 CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700875
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700876 CHECK_EQ(object_array_string.Get(),
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100877 FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
Brian Carlstrom1f870082011-08-23 16:02:11 -0700878
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800879  // End of special init trickery; all subsequent classes may be loaded via FindSystemClass.
Ian Rogers466bb252011-10-14 03:29:56 -0700880
Ian Rogers23435d02012-09-24 11:23:12 -0700881 // Create java.lang.reflect.Proxy root.
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100882 SetClassRoot(ClassRoot::kJavaLangReflectProxy,
883 FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
Ian Rogers466bb252011-10-14 03:29:56 -0700884
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700885 // Create java.lang.reflect.Field.class root.
Vladimir Markoacb906d2018-05-30 10:23:49 +0100886 ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700887 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100888 SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700889
890 // Create java.lang.reflect.Field array root.
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700891 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
892 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100893 SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700894
895 // Create java.lang.reflect.Constructor.class root and array root.
896 class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
897 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100898 SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700899 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
900 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100901 SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700902
903 // Create java.lang.reflect.Method.class root and array root.
904 class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
905 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100906 SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700907 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
908 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100909 SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700910
Orion Hodson005ac512017-10-24 15:43:43 +0100911 // Create java.lang.invoke.CallSite.class root
912 class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
913 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100914 SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100915
Narayan Kamathafa48272016-08-03 12:46:58 +0100916 // Create java.lang.invoke.MethodType.class root
917 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
918 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100919 SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
Narayan Kamathafa48272016-08-03 12:46:58 +0100920
921 // Create java.lang.invoke.MethodHandleImpl.class root
922 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
923 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100924 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
Vladimir Markoc7aa87e2018-05-24 15:19:52 +0100925 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
Narayan Kamathafa48272016-08-03 12:46:58 +0100926
Orion Hodsonc069a302017-01-18 09:23:12 +0000927 // Create java.lang.invoke.MethodHandles.Lookup.class root
928 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
929 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100930 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +0000931
Orion Hodson005ac512017-10-24 15:43:43 +0100932 // Create java.lang.invoke.VarHandle.class root
933 class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
Orion Hodsonc069a302017-01-18 09:23:12 +0000934 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100935 SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100936
937 // Create java.lang.invoke.FieldVarHandle.class root
938 class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
939 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100940 SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100941
Orion Hodsondd411962021-06-25 08:55:22 +0100942 // Create java.lang.invoke.StaticFieldVarHandle.class root
943 class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
944 CHECK(class_root != nullptr);
945 SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);
946
Orion Hodson005ac512017-10-24 15:43:43 +0100947 // Create java.lang.invoke.ArrayElementVarHandle.class root
948 class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
949 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100950 SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100951
952 // Create java.lang.invoke.ByteArrayViewVarHandle.class root
953 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
954 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100955 SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100956
957 // Create java.lang.invoke.ByteBufferViewVarHandle.class root
958 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
959 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100960 SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +0000961
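  // Create dalvik.system.EmulatedStackFrame.class root.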
Narayan Kamath000e1882016-10-24 17:14:25 +0100962 class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
963 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100964 SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
Narayan Kamath000e1882016-10-24 17:14:25 +0100965
Brian Carlstrom1f870082011-08-23 16:02:11 -0700966  // The java.lang.ref classes need to be specially flagged, but otherwise they are normal classes.
Fred Shih4ee7a662014-07-11 09:59:27 -0700967  // Finish initializing the Reference class.
Vladimir Marko2c64a832018-01-04 11:31:56 +0000968 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800969 CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
Fred Shih4ee7a662014-07-11 09:59:27 -0700970 CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700971 CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
972 mirror::Reference::ClassSize(image_pointer_size_));
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700973 class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700974 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700975 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700976 class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700977 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700978 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700979 class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700980 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700981 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700982 class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700983 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700984 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
Brian Carlstrom1f870082011-08-23 16:02:11 -0700985
Ian Rogers23435d02012-09-24 11:23:12 -0700986  // Set up the ClassLoader, verifying the object_size_.
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700987 class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
Mathieu Chartiere4275c02015-08-06 15:34:15 -0700988 class_root->SetClassLoaderClass();
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700989 CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100990 SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700991
jeffhao8cd6dda2012-02-22 10:15:34 -0800992 // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
Ian Rogers23435d02012-09-24 11:23:12 -0700993 // java.lang.StackTraceElement as a convenience.
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100994 SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100995 SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
Brian Carlstromf3632832014-05-20 15:36:53 -0700996 FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100997 SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
998 FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
999 SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
Brian Carlstromf3632832014-05-20 15:36:53 -07001000 FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00001001 SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
1002 FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
Elliott Hughesd8ddfd52011-08-15 14:32:53 -07001003
Mathieu Chartiercdca4762016-04-28 09:44:54 -07001004 // Create conflict tables that depend on the class linker.
1005 runtime->FixupConflictTables();
1006
Ian Rogers98379392014-02-24 16:53:16 -08001007 FinishInit(self);
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001008
Brian Carlstroma004aa92012-02-08 18:05:09 -08001009 VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001010
1011 return true;
Brian Carlstroma663ea52011-08-19 23:33:41 -07001012}
1013
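// Binds each String.<init> signature to the corresponding StringFactory method (ART replaces
// String constructor invocations with StringFactory calls) and updates the primordial thread's
// String entrypoints.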
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001014static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1015 REQUIRES_SHARED(Locks::mutator_lock_) {
1016 // Find String.<init> -> StringFactory bindings.
1017 ObjPtr<mirror::Class> string_factory_class =
1018 class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1019 CHECK(string_factory_class != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001020 ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001021 WellKnownClasses::InitStringInit(string_class, string_factory_class);
1022 // Update the primordial thread.
1023 self->InitStringEntryPoints();
1024}
1025
Ian Rogers98379392014-02-24 16:53:16 -08001026void ClassLinker::FinishInit(Thread* self) {
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -08001027 VLOG(startup) << "ClassLinker::FinishInit entering";
Brian Carlstrom16192862011-09-12 17:50:06 -07001028
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001029 CreateStringInitBindings(self, this);
1030
Brian Carlstrom16192862011-09-12 17:50:06 -07001031  // Let the heap know some key offsets into java.lang.ref instances.
Elliott Hughes20cde902011-10-04 17:37:27 -07001032  // Note: we hard-code the field indexes here rather than using FindInstanceField
Brian Carlstrom16192862011-09-12 17:50:06 -07001033  // because the types of the fields can't be resolved before the runtime is
1034  // fully initialized.
Andreas Gampe7b2450e2018-06-19 10:45:54 -07001035 StackHandleScope<3> hs(self);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001036 Handle<mirror::Class> java_lang_ref_Reference =
1037 hs.NewHandle(GetClassRoot<mirror::Reference>(this));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001038 Handle<mirror::Class> java_lang_ref_FinalizerReference =
1039 hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08001040
Mathieu Chartierc7853442015-03-27 14:35:38 -07001041 ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001042 CHECK_STREQ(pendingNext->GetName(), "pendingNext");
1043 CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001044
Mathieu Chartierc7853442015-03-27 14:35:38 -07001045 ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001046 CHECK_STREQ(queue->GetName(), "queue");
1047 CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001048
Mathieu Chartierc7853442015-03-27 14:35:38 -07001049 ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001050 CHECK_STREQ(queueNext->GetName(), "queueNext");
1051 CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001052
Mathieu Chartierc7853442015-03-27 14:35:38 -07001053 ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001054 CHECK_STREQ(referent->GetName(), "referent");
1055 CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001056
Mathieu Chartierc7853442015-03-27 14:35:38 -07001057 ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001058 CHECK_STREQ(zombie->GetName(), "zombie");
1059 CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");
Brian Carlstrom16192862011-09-12 17:50:06 -07001060
Brian Carlstroma663ea52011-08-19 23:33:41 -07001061  // Ensure all class_roots_ are initialized.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001062 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
Brian Carlstroma663ea52011-08-19 23:33:41 -07001063 ClassRoot class_root = static_cast<ClassRoot>(i);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001064 ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07001065 CHECK(klass != nullptr);
1066 DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
Brian Carlstroma663ea52011-08-19 23:33:41 -07001067    // Note: SetClassRoot does additional validation.
1068    // If possible, add new checks there to catch errors early.
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07001069 }
1070
Vladimir Marko02610552018-06-04 14:38:00 +01001071 CHECK(GetArrayIfTable() != nullptr);
Elliott Hughes92f14b22011-10-06 12:29:54 -07001072
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07001073  // Disable the slow paths in FindClass and CreatePrimitiveClass now
1074  // that Object, Class, and Object[] are set up.
1075 init_done_ = true;
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001076
Andreas Gampe7b2450e2018-06-19 10:45:54 -07001077 // Under sanitization, the small carve-out to handle stack overflow might not be enough to
1078 // initialize the StackOverflowError class (as it might require running the verifier). Instead,
1079 // ensure that the class will be initialized.
1080 if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
Andreas Gampee0bbab92019-07-25 12:28:22 -07001081 verifier::ClassVerifier::Init(this); // Need to prepare the verifier.
Andreas Gampe7b2450e2018-06-19 10:45:54 -07001082
1083 ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
1084 if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
1085 // Strange, but don't crash.
1086 LOG(WARNING) << "Could not prepare StackOverflowError.";
1087 self->ClearException();
1088 }
1089 }
1090
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -08001091 VLOG(startup) << "ClassLinker::FinishInit exiting";
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07001092}
1093
Vladimir Markodcfcce42018-06-27 10:00:28 +00001094void ClassLinker::RunRootClinits(Thread* self) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001095 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1096 ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
Elliott Hughes2a20cfd2011-09-23 19:30:41 -07001097 if (!c->IsArrayClass() && !c->IsPrimitive()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001098 StackHandleScope<1> hs(self);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001099 Handle<mirror::Class> h_class(hs.NewHandle(c));
David Srbecky08110ef2020-05-20 19:33:43 +01001100 if (!EnsureInitialized(self, h_class, true, true)) {
1101 LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1102 << ": " << self->GetException()->Dump();
1103 }
Vladimir Markodcfcce42018-06-27 10:00:28 +00001104 } else {
1105 DCHECK(c->IsInitialized());
Elliott Hughes2a20cfd2011-09-23 19:30:41 -07001106 }
1107 }
1108}
1109
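// Precomputes the Modified-UTF-8 hashes of java.lang.Object's virtual method names into
// `virtual_method_hashes`, so that later method-name lookups can compare hashes before
// falling back to full string comparison.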
Vladimir Marko43354742021-02-03 15:37:01 +00001110static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1111 PointerSize pointer_size,
1112 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1113 REQUIRES_SHARED(Locks::mutator_lock_) {
1114 ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1115 DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1116 for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
Vladimir Markob4bd92f2021-07-05 12:18:26 +01001117 std::string_view name = virtual_methods[i].GetNameView();
Vladimir Marko43354742021-02-03 15:37:01 +00001118 virtual_method_hashes[i] = ComputeModifiedUtf8Hash(name);
1119 }
1120}
1121
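// Plain data holder passed to the heap visitor in InitFromBootImage when verifying (in debug
// builds) that no ArtMethod in a secondary boot image space uses another image's trampolines
// as its entrypoint.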
Jeff Haodcdc85b2015-12-04 14:06:18 -08001122struct TrampolineCheckData {
1123 const void* quick_resolution_trampoline;
1124 const void* quick_imt_conflict_trampoline;
1125 const void* quick_generic_jni_trampoline;
1126 const void* quick_to_interpreter_bridge_trampoline;
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00001127 const void* nterp_trampoline;
Andreas Gampe542451c2016-07-26 09:02:02 -07001128 PointerSize pointer_size;
Jeff Haodcdc85b2015-12-04 14:06:18 -08001129 ArtMethod* m;
1130 bool error;
1131};
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001132
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001133bool ClassLinker::InitFromBootImage(std::string* error_msg) {
1134 VLOG(startup) << __FUNCTION__ << " entering";
Brian Carlstroma663ea52011-08-19 23:33:41 -07001135 CHECK(!init_done_);
1136
Mathieu Chartierdaaf3262015-03-24 13:30:28 -07001137 Runtime* const runtime = Runtime::Current();
1138 Thread* const self = Thread::Current();
1139 gc::Heap* const heap = runtime->GetHeap();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001140 std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
1141 CHECK(!spaces.empty());
Vladimir Marko024d69f2019-06-13 10:52:32 +01001142 const ImageHeader& image_header = spaces[0]->GetImageHeader();
1143 uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
Andreas Gampe542451c2016-07-26 09:02:02 -07001144 if (!ValidPointerSize(pointer_size_unchecked)) {
1145 *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001146 return false;
1147 }
Vladimir Marko3364d182019-03-13 13:55:01 +00001148 image_pointer_size_ = image_header.GetPointerSize();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001149 if (!runtime->IsAotCompiler()) {
1150 // Only the Aot compiler supports having an image with a different pointer size than the
1151 // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
1152 // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
Andreas Gampe542451c2016-07-26 09:02:02 -07001153 if (image_pointer_size_ != kRuntimePointerSize) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001154 *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
Andreas Gampe542451c2016-07-26 09:02:02 -07001155 static_cast<size_t>(image_pointer_size_),
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001156 sizeof(void*));
1157 return false;
1158 }
1159 }
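  // Transfer the image's runtime methods (resolution, IMT conflict/unimplemented and the
  // callee-save methods for each CalleeSaveType) to the Runtime.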
Vladimir Marko3364d182019-03-13 13:55:01 +00001160 DCHECK(!runtime->HasResolutionMethod());
1161 runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
1162 runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
1163 runtime->SetImtUnimplementedMethod(
1164 image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
1165 runtime->SetCalleeSaveMethod(
1166 image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
1167 CalleeSaveType::kSaveAllCalleeSaves);
1168 runtime->SetCalleeSaveMethod(
1169 image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
1170 CalleeSaveType::kSaveRefsOnly);
1171 runtime->SetCalleeSaveMethod(
1172 image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
1173 CalleeSaveType::kSaveRefsAndArgs);
1174 runtime->SetCalleeSaveMethod(
1175 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
1176 CalleeSaveType::kSaveEverything);
1177 runtime->SetCalleeSaveMethod(
1178 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
1179 CalleeSaveType::kSaveEverythingForClinit);
1180 runtime->SetCalleeSaveMethod(
1181 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
1182 CalleeSaveType::kSaveEverythingForSuspendCheck);
1183
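  // Register the boot image oat files and cache the trampoline entrypoints from the primary
  // oat file's header; all boot image oat files are expected to share the same trampolines.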
Jeff Haodcdc85b2015-12-04 14:06:18 -08001184 std::vector<const OatFile*> oat_files =
1185 runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
1186 DCHECK(!oat_files.empty());
1187 const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
Vladimir Marko7dac8642019-11-06 17:09:30 +00001188 jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
Vladimir Markofa458ac2020-02-12 14:08:07 +00001189 jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001190 quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
1191 quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
1192 quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
1193 quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00001194 nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001195 if (kIsDebugBuild) {
1196    // Check that the other images use the same trampolines.
1197 for (size_t i = 1; i < oat_files.size(); ++i) {
1198 const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
Vladimir Marko7dac8642019-11-06 17:09:30 +00001199 const void* ith_jni_dlsym_lookup_trampoline_ =
1200 ith_oat_header.GetJniDlsymLookupTrampoline();
Vladimir Markofa458ac2020-02-12 14:08:07 +00001201 const void* ith_jni_dlsym_lookup_critical_trampoline_ =
1202 ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
Jeff Haodcdc85b2015-12-04 14:06:18 -08001203 const void* ith_quick_resolution_trampoline =
1204 ith_oat_header.GetQuickResolutionTrampoline();
1205 const void* ith_quick_imt_conflict_trampoline =
1206 ith_oat_header.GetQuickImtConflictTrampoline();
1207 const void* ith_quick_generic_jni_trampoline =
1208 ith_oat_header.GetQuickGenericJniTrampoline();
1209 const void* ith_quick_to_interpreter_bridge_trampoline =
1210 ith_oat_header.GetQuickToInterpreterBridge();
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00001211 const void* ith_nterp_trampoline =
1212 ith_oat_header.GetNterpTrampoline();
Vladimir Marko7dac8642019-11-06 17:09:30 +00001213 if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
Vladimir Markofa458ac2020-02-12 14:08:07 +00001214 ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
Vladimir Marko7dac8642019-11-06 17:09:30 +00001215 ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
Jeff Haodcdc85b2015-12-04 14:06:18 -08001216 ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
1217 ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00001218 ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
1219 ith_nterp_trampoline != nterp_trampoline_) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08001220        // Make sure that no method in this image uses those trampolines as its entrypoint.
1221        // Otherwise the class linker won't be able to work with a single set.
1222 TrampolineCheckData data;
1223 data.error = false;
1224 data.pointer_size = GetImagePointerSize();
1225 data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
1226 data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
1227 data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
1228 data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00001229 data.nterp_trampoline = ith_nterp_trampoline;
Jeff Haodcdc85b2015-12-04 14:06:18 -08001230 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
Andreas Gampe0c183382017-07-13 22:26:24 -07001231 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1232 if (obj->IsClass()) {
1233 ObjPtr<mirror::Class> klass = obj->AsClass();
1234 for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
1235 const void* entrypoint =
1236 m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
1237 if (entrypoint == data.quick_resolution_trampoline ||
1238 entrypoint == data.quick_imt_conflict_trampoline ||
1239 entrypoint == data.quick_generic_jni_trampoline ||
1240 entrypoint == data.quick_to_interpreter_bridge_trampoline) {
1241 data.m = &m;
1242 data.error = true;
1243 return;
1244 }
1245 }
1246 }
1247 };
1248 spaces[i]->GetLiveBitmap()->Walk(visitor);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001249 if (data.error) {
1250 ArtMethod* m = data.m;
David Sehr709b0702016-10-13 09:12:37 -07001251 LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
Jeff Haodcdc85b2015-12-04 14:06:18 -08001252 *error_msg = "Found an ArtMethod with a bad entrypoint";
1253 return false;
1254 }
1255 }
1256 }
1257 }
Brian Carlstrom58ae9412011-10-04 00:56:06 -07001258
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001259 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
Vladimir Markod7e9bbf2019-03-28 13:18:57 +00001260 ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
Vladimir Marko024d69f2019-06-13 10:52:32 +01001261 image_header.GetImageRoot(ImageHeader::kClassRoots)));
Vladimir Markof75613c2018-06-05 12:51:04 +01001262 DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
Mathieu Chartier02b6a782012-10-26 13:51:26 -07001263
Vladimir Marko024d69f2019-06-13 10:52:32 +01001264 DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
1265 ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
1266 ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
1267 image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
1268 runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
1269 DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07001270
Vladimir Markod1908512018-11-22 14:57:28 +00001271 for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001272 // Boot class loader, use a null handle.
1273 std::vector<std::unique_ptr<const DexFile>> dex_files;
Vladimir Markod1908512018-11-22 14:57:28 +00001274 if (!AddImageSpace(spaces[i],
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001275 ScopedNullHandle<mirror::ClassLoader>(),
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001276 /*out*/&dex_files,
1277 error_msg)) {
1278 return false;
Jeff Haodcdc85b2015-12-04 14:06:18 -08001279 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001280 // Append opened dex files at the end.
1281 boot_dex_files_.insert(boot_dex_files_.end(),
1282 std::make_move_iterator(dex_files.begin()),
1283 std::make_move_iterator(dex_files.end()));
Mathieu Chartier208a5cb2015-12-02 15:44:07 -08001284 }
Mathieu Chartierbe8303d2017-08-17 17:39:39 -07001285 for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
1286 OatDexFile::MadviseDexFile(*dex_file, MadviseState::kMadviseStateAtLoad);
1287 }
Vladimir Marko43354742021-02-03 15:37:01 +00001288 InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
1289 image_pointer_size_,
1290 ArrayRef<uint32_t>(object_virtual_method_hashes_));
Ian Rogers98379392014-02-24 16:53:16 -08001291 FinishInit(self);
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001292
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001293 VLOG(startup) << __FUNCTION__ << " exiting";
1294 return true;
1295}
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001296
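// Appends additional dex files to the boot class path after initialization. In debug builds,
// checks that none of them duplicates an already-registered boot dex file location.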
Vladimir Marko4433c432018-12-04 14:57:47 +00001297void ClassLinker::AddExtraBootDexFiles(
1298 Thread* self,
1299 std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1300 for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08001301 AppendToBootClassPath(self, dex_file.get());
Orion Hodson771708f2021-01-06 15:45:16 +00001302 if (kIsDebugBuild) {
1303 for (const auto& boot_dex_file : boot_dex_files_) {
1304 DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1305 }
1306 }
Vladimir Marko4433c432018-12-04 14:57:47 +00001307 boot_dex_files_.push_back(std::move(dex_file));
1308 }
1309}
1310
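// A class loader is treated as the boot class loader if it is null or an instance of
// java.lang.BootClassLoader.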
Jeff Hao5872d7c2016-04-27 11:07:41 -07001311bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001312 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001313 return class_loader == nullptr ||
Mathieu Chartier0795f232016-09-27 18:43:30 -07001314 soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
1315 class_loader->GetClass();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001316}
1317
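// Visitor applied to the classes of a class loader that is about to be deleted; it tells
// Class Hierarchy Analysis to reset any single-implementation information recorded for the
// doomed classes (see the call to ResetSingleImplementationInHierarchy below).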
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03001318class CHAOnDeleteUpdateClassVisitor {
1319 public:
1320 explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
1321 : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
1322 pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
1323 self_(Thread::Current()) {}
1324
1325 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1326 // This class is going to be unloaded. Tell CHA about it.
1327 cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
1328 return true;
1329 }
1330 private:
1331 const LinearAlloc* allocator_;
1332 const ClassHierarchyAnalysis* cha_;
1333 const PointerSize pointer_size_;
1334 const Thread* self_;
1335};
1336
Chris Wailes0c61be42018-09-26 17:27:34 -07001337/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001338 * A visitor used to check that all references to strings interned in an AppImage have been
1339 * properly recorded in the interned references list; it is only ever used in debug builds.
Chris Wailes0c61be42018-09-26 17:27:34 -07001340 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001341class CountInternedStringReferencesVisitor {
Chang Xingba17dbd2017-06-28 21:27:56 +00001342 public:
Vladimir Marko8e05f092019-06-10 11:10:38 +01001343 CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
1344 const InternTable::UnorderedSet& image_interns)
1345 : space_(space),
1346 image_interns_(image_interns),
1347 count_(0u) {}
Chris Wailes0c61be42018-09-26 17:27:34 -07001348
Chris Wailes0c61be42018-09-26 17:27:34 -07001349 void TestObject(ObjPtr<mirror::Object> referred_obj) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001350 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001351 if (referred_obj != nullptr &&
1352 space_.HasAddress(referred_obj.Ptr()) &&
1353 referred_obj->IsString()) {
1354 ObjPtr<mirror::String> referred_str = referred_obj->AsString();
Vladimir Marko8e05f092019-06-10 11:10:38 +01001355 auto it = image_interns_.find(GcRoot<mirror::String>(referred_str));
1356 if (it != image_interns_.end() && it->Read() == referred_str) {
1357 ++count_;
Chris Wailesfbeef462018-10-19 14:16:35 -07001358 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001359 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001360 }
1361
Chris Wailes0c61be42018-09-26 17:27:34 -07001362 void VisitRootIfNonNull(
Chang Xingba17dbd2017-06-28 21:27:56 +00001363 mirror::CompressedReference<mirror::Object>* root) const
1364 REQUIRES_SHARED(Locks::mutator_lock_) {
1365 if (!root->IsNull()) {
1366 VisitRoot(root);
1367 }
1368 }
1369
Chris Wailes0c61be42018-09-26 17:27:34 -07001370 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001371 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001372 TestObject(root->AsMirrorPtr());
Chang Xingba17dbd2017-06-28 21:27:56 +00001373 }
1374
1375  // Visit class fields.
Chris Wailes0c61be42018-09-26 17:27:34 -07001376 void operator()(ObjPtr<mirror::Object> obj,
1377 MemberOffset offset,
1378 bool is_static ATTRIBUTE_UNUSED) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001379 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001380 // References within image or across images don't need a read barrier.
1381 ObjPtr<mirror::Object> referred_obj =
1382 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1383 TestObject(referred_obj);
Chang Xingba17dbd2017-06-28 21:27:56 +00001384 }
1385
1386 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1387 ObjPtr<mirror::Reference> ref) const
1388 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001389 operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
Chang Xingba17dbd2017-06-28 21:27:56 +00001390 }
1391
Vladimir Marko8e05f092019-06-10 11:10:38 +01001392 size_t GetCount() const {
1393 return count_;
1394 }
1395
1396 private:
Chris Wailes0c61be42018-09-26 17:27:34 -07001397 const gc::space::ImageSpace& space_;
Vladimir Marko8e05f092019-06-10 11:10:38 +01001398 const InternTable::UnorderedSet& image_interns_;
1399 mutable size_t count_; // Modified from the `const` callbacks.
Chang Xingba17dbd2017-06-28 21:27:56 +00001400};
1401
Chris Wailes0c61be42018-09-26 17:27:34 -07001402/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001403 * This function counts references to strings interned in the AppImage.
1404 * This is used in debug builds to check against the number of recorded references.
Chris Wailes0c61be42018-09-26 17:27:34 -07001405 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001406size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1407 const InternTable::UnorderedSet& image_interns)
1408 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001409 const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1410 const ImageHeader& image_header = space.GetImageHeader();
1411 const uint8_t* target_base = space.GetMemMap()->Begin();
1412 const ImageSection& objects_section = image_header.GetObjectsSection();
Chris Wailesfbeef462018-10-19 14:16:35 -07001413
1414 auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1415 auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
Chris Wailes0c61be42018-09-26 17:27:34 -07001416
Vladimir Marko8e05f092019-06-10 11:10:38 +01001417 CountInternedStringReferencesVisitor visitor(space, image_interns);
Chris Wailes0c61be42018-09-26 17:27:34 -07001418 bitmap->VisitMarkedRange(objects_begin,
1419 objects_end,
1420 [&space, &visitor](mirror::Object* obj)
1421 REQUIRES_SHARED(Locks::mutator_lock_) {
1422 if (space.HasAddress(obj)) {
1423 if (obj->IsDexCache()) {
Chris Wailesfbeef462018-10-19 14:16:35 -07001424 obj->VisitReferences</* kVisitNativeRoots= */ true,
1425 kVerifyNone,
1426 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001427 } else {
1428 // Don't visit native roots for non-dex-cache as they can't contain
1429 // native references to strings. This is verified during compilation
1430 // by ImageWriter::VerifyNativeGCRootInvariants.
Chris Wailesfbeef462018-10-19 14:16:35 -07001431 obj->VisitReferences</* kVisitNativeRoots= */ false,
1432 kVerifyNone,
1433 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001434 }
1435 }
1436 });
Vladimir Marko8e05f092019-06-10 11:10:38 +01001437 return visitor.GetCount();
1438}
1439
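// Walks the string-reference-offsets section of an app image and applies `visitor` to every
// referenced string; if the visitor returns a different String (e.g. a canonical interned
// instance), the referring field is updated in place.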
1440template <typename Visitor>
1441static void VisitInternedStringReferences(
1442 gc::space::ImageSpace* space,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001443 const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1444 const uint8_t* target_base = space->Begin();
1445 const ImageSection& sro_section =
1446 space->GetImageHeader().GetImageStringReferenceOffsetsSection();
1447 const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
1448
1449 VLOG(image)
1450 << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
1451 << num_string_offsets;
1452
1453 const auto* sro_base =
1454 reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
1455
1456 for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
1457 uint32_t base_offset = sro_base[offset_index].first;
1458
David Srbecky86d6cd52020-12-02 18:13:10 +00001459 uint32_t raw_member_offset = sro_base[offset_index].second;
1460 DCHECK_ALIGNED(base_offset, 2);
1461 DCHECK_ALIGNED(raw_member_offset, 2);
Vladimir Marko8e05f092019-06-10 11:10:38 +01001462
David Srbecky86d6cd52020-12-02 18:13:10 +00001463 ObjPtr<mirror::Object> obj_ptr =
1464 reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
1465 MemberOffset member_offset(raw_member_offset);
1466 ObjPtr<mirror::String> referred_string =
1467 obj_ptr->GetFieldObject<mirror::String,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001468 kVerifyNone,
David Srbecky86d6cd52020-12-02 18:13:10 +00001469 kWithoutReadBarrier,
1470 /* kIsVolatile= */ false>(member_offset);
1471 DCHECK(referred_string != nullptr);
1472
1473 ObjPtr<mirror::String> visited = visitor(referred_string);
1474 if (visited != referred_string) {
1475 obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1476 /* kCheckTransaction= */ false,
1477 kVerifyNone,
1478 /* kIsVolatile= */ false>(member_offset, visited);
Vladimir Marko8e05f092019-06-10 11:10:38 +01001479 }
1480 }
1481}
1482
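// Debug-only verification: every string reference recorded in the image must resolve to a
// string present in the image's interned-strings section, and the number of recorded
// references must match an independent count over the image objects.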
1483static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1484 REQUIRES_SHARED(Locks::mutator_lock_) {
1485 InternTable::UnorderedSet image_interns;
1486 const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1487 if (section.Size() > 0) {
1488 size_t read_count;
1489 const uint8_t* data = space->Begin() + section.Offset();
1490 InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1491 image_set.swap(image_interns);
1492 }
1493 size_t num_recorded_refs = 0u;
1494 VisitInternedStringReferences(
1495 space,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001496 [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1497 REQUIRES_SHARED(Locks::mutator_lock_) {
1498 auto it = image_interns.find(GcRoot<mirror::String>(str));
1499 CHECK(it != image_interns.end());
1500 CHECK(it->Read() == str);
1501 ++num_recorded_refs;
1502 return str;
1503 });
1504 size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1505 CHECK_EQ(num_recorded_refs, num_found_refs);
Chris Wailes0c61be42018-09-26 17:27:34 -07001506}
1507
Andreas Gampe2af99022017-04-25 08:32:59 -07001508// new_class_set is the set of classes that were read from the class table section in the image.
1509// If there was no class table section, it is null.
1510// Note: using a class here to avoid having to make ClassLinker internals public.
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001511class AppImageLoadingHelper {
Andreas Gampe2af99022017-04-25 08:32:59 -07001512 public:
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001513 static void Update(
Andreas Gampe2af99022017-04-25 08:32:59 -07001514 ClassLinker* class_linker,
1515 gc::space::ImageSpace* space,
1516 Handle<mirror::ClassLoader> class_loader,
David Srbecky86d6cd52020-12-02 18:13:10 +00001517 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
Andreas Gampe2af99022017-04-25 08:32:59 -07001518 REQUIRES(!Locks::dex_lock_)
1519 REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001520
Chris Wailesfbeef462018-10-19 14:16:35 -07001521 static void HandleAppImageStrings(gc::space::ImageSpace* space)
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001522 REQUIRES_SHARED(Locks::mutator_lock_);
Andreas Gampe2af99022017-04-25 08:32:59 -07001523};
1524
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001525void AppImageLoadingHelper::Update(
Andreas Gampe2af99022017-04-25 08:32:59 -07001526 ClassLinker* class_linker,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001527 gc::space::ImageSpace* space,
1528 Handle<mirror::ClassLoader> class_loader,
David Srbecky86d6cd52020-12-02 18:13:10 +00001529 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
Andreas Gampe2af99022017-04-25 08:32:59 -07001530 REQUIRES(!Locks::dex_lock_)
1531 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes23866362018-08-22 16:16:58 -07001532 ScopedTrace app_image_timing("AppImage:Updating");
1533
Vladimir Marko8e05f092019-06-10 11:10:38 +01001534 if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
1535 // In debug build, verify the string references before applying
1536 // the Runtime::LoadAppImageStartupCache() option.
1537 VerifyInternedStringReferences(space);
1538 }
1539
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001540 Thread* const self = Thread::Current();
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001541 Runtime* const runtime = Runtime::Current();
1542 gc::Heap* const heap = runtime->GetHeap();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001543 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001544 {
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001545 // Register dex caches with the class loader.
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001546 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Alex Lighta9bbc082019-11-14 14:51:41 -08001547 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001548 const DexFile* const dex_file = dex_cache->GetDexFile();
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001549 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08001550 WriterMutexLock mu2(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08001551 CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
Andreas Gampe2af99022017-04-25 08:32:59 -07001552 class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001553 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001554 }
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001555 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001556
Mathieu Chartier0933cc52018-03-23 14:25:08 -07001557 if (ClassLinker::kAppImageMayContainStrings) {
Chris Wailesfbeef462018-10-19 14:16:35 -07001558 HandleAppImageStrings(space);
Chang Xingba17dbd2017-06-28 21:27:56 +00001559 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001560
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001561 if (kVerifyArtMethodDeclaringClasses) {
Chris Wailes23866362018-08-22 16:16:58 -07001562 ScopedTrace timing("AppImage:VerifyDeclaringClasses");
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001563 ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001564 gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
1565 header.VisitPackedArtMethods([&](ArtMethod& method)
1566 REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1567 ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
1568 if (klass != nullptr) {
1569 CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
1570 }
1571 }, space->Begin(), kRuntimePointerSize);
Mathieu Chartier03c1dd92016-03-07 16:13:54 -08001572 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001573}
1574
Chris Wailesfbeef462018-10-19 14:16:35 -07001575void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001576 // Iterate over the string reference offsets stored in the image and intern
1577 // the strings they point to.
1578 ScopedTrace timing("AppImage:InternString");
1579
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001580 Runtime* const runtime = Runtime::Current();
1581 InternTable* const intern_table = runtime->GetInternTable();
1582
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001583  // Add the image's interned strings to the intern table, removing any conflicts.
1584  // For conflicts, store the new address in a map for faster lookup.
1585  // TODO: Optimize with a bitmap or Bloom filter.
1586 SafeMap<mirror::String*, mirror::String*> intern_remap;
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001587 auto func = [&](InternTable::UnorderedSet& interns)
Mathieu Chartier41c08082018-10-31 11:50:26 -07001588 REQUIRES_SHARED(Locks::mutator_lock_)
1589 REQUIRES(Locks::intern_table_lock_) {
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001590 const size_t non_boot_image_strings = intern_table->CountInterns(
1591 /*visit_boot_images=*/false,
1592 /*visit_non_boot_images=*/true);
Chris Wailesfbeef462018-10-19 14:16:35 -07001593 VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001594 VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
1595 // Visit the smaller of the two sets to compute the intersection.
1596 if (interns.size() < non_boot_image_strings) {
1597 for (auto it = interns.begin(); it != interns.end(); ) {
1598 ObjPtr<mirror::String> string = it->Read();
1599 ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
1600 if (existing == nullptr) {
1601 existing = intern_table->LookupStrongLocked(string);
1602 }
1603 if (existing != nullptr) {
1604 intern_remap.Put(string.Ptr(), existing.Ptr());
1605 it = interns.erase(it);
1606 } else {
1607 ++it;
1608 }
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001609 }
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001610 } else {
1611 intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
1612 REQUIRES_SHARED(Locks::mutator_lock_)
1613 REQUIRES(Locks::intern_table_lock_) {
1614 auto it = interns.find(root);
1615 if (it != interns.end()) {
1616 ObjPtr<mirror::String> existing = root.Read();
1617 intern_remap.Put(it->Read(), existing.Ptr());
1618 it = interns.erase(it);
1619 }
1620 }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
1621 }
David Srbecky346fd962020-07-27 16:51:00 +01001622    // Consistency check: none of the remaining strings in `interns` should already be
    // present in the intern table.
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001623 if (kIsDebugBuild) {
1624 for (GcRoot<mirror::String>& root : interns) {
1625 ObjPtr<mirror::String> string = root.Read();
1626 CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
1627 CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001628 }
1629 }
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001630 };
Vladimir Marko8e05f092019-06-10 11:10:38 +01001631 intern_table->AddImageStringsToTable(space, func);
1632 if (!intern_remap.empty()) {
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001633 VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
Vladimir Marko8e05f092019-06-10 11:10:38 +01001634 VisitInternedStringReferences(
1635 space,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001636 [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
1637 auto it = intern_remap.find(str.Ptr());
1638 if (it != intern_remap.end()) {
1639 return ObjPtr<mirror::String>(it->second);
1640 }
1641 return str;
1642 });
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001643 }
1644}
1645
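// Opens the dex file with the given location from within `oat_file`, checking that its
// location checksum matches the one recorded in the oat file; returns null and sets
// `error_msg` on failure.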
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001646static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1647 const char* location,
1648 std::string* error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001649 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001650 DCHECK(error_msg != nullptr);
1651 std::unique_ptr<const DexFile> dex_file;
Andreas Gampeb40d3612018-06-26 15:49:42 -07001652 const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001653 if (oat_dex_file == nullptr) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001654 return std::unique_ptr<const DexFile>();
1655 }
1656 std::string inner_error_msg;
1657 dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1658 if (dex_file == nullptr) {
1659 *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1660 location,
1661 oat_file->GetLocation().c_str(),
1662 inner_error_msg.c_str());
1663 return std::unique_ptr<const DexFile>();
1664 }
1665
1666 if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1667 *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1668 location,
1669 dex_file->GetLocationChecksum(),
1670 oat_dex_file->GetDexFileLocationChecksum());
1671 return std::unique_ptr<const DexFile>();
1672 }
1673 return dex_file;
1674}
1675
1676bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1677 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1678 std::string* error_msg) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07001679 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001680 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001681 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001682 DCHECK(dex_caches_object != nullptr);
Vladimir Marko4617d582019-03-28 13:48:31 +00001683 ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001684 dex_caches_object->AsObjectArray<mirror::DexCache>();
1685 const OatFile* oat_file = space->GetOatFile();
Alex Lighta9bbc082019-11-14 14:51:41 -08001686 for (auto dex_cache : dex_caches->Iterate()) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001687 std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1688 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1689 dex_file_location.c_str(),
1690 error_msg);
1691 if (dex_file == nullptr) {
1692 return false;
1693 }
1694 dex_cache->SetDexFile(dex_file.get());
1695 out_dex_files->push_back(std::move(dex_file));
1696 }
1697 return true;
1698}
1699
Andreas Gampe0793bec2016-12-01 11:37:33 -08001700// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1701// together and caches some intermediate results.
Orion Hodson5880c772020-07-28 20:12:08 +01001702class ImageChecker final {
Andreas Gampe0793bec2016-12-01 11:37:33 -08001703 public:
1704 static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
1705 REQUIRES_SHARED(Locks::mutator_lock_) {
Orion Hodson5880c772020-07-28 20:12:08 +01001706 ImageChecker ic(heap, class_linker);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001707 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1708 DCHECK(obj != nullptr);
1709 CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
1710 CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
1711 if (obj->IsClass()) {
1712 auto klass = obj->AsClass();
1713 for (ArtField& field : klass->GetIFields()) {
1714 CHECK_EQ(field.GetDeclaringClass(), klass);
1715 }
1716 for (ArtField& field : klass->GetSFields()) {
1717 CHECK_EQ(field.GetDeclaringClass(), klass);
1718 }
Orion Hodson5880c772020-07-28 20:12:08 +01001719 const PointerSize pointer_size = ic.pointer_size_;
Vladimir Markoc524e9e2019-03-26 10:54:50 +00001720 for (ArtMethod& m : klass->GetMethods(pointer_size)) {
Orion Hodson5880c772020-07-28 20:12:08 +01001721 ic.CheckArtMethod(&m, klass);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001722 }
Vladimir Markoc524e9e2019-03-26 10:54:50 +00001723 ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
Andreas Gampe1c158a02017-07-13 17:26:19 -07001724 if (vtable != nullptr) {
Orion Hodson5880c772020-07-28 20:12:08 +01001725 ic.CheckArtMethodPointerArray(vtable, nullptr);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001726 }
1727 if (klass->ShouldHaveImt()) {
1728 ImTable* imt = klass->GetImt(pointer_size);
1729 for (size_t i = 0; i < ImTable::kSize; ++i) {
Orion Hodson5880c772020-07-28 20:12:08 +01001730 ic.CheckArtMethod(imt->Get(i, pointer_size), nullptr);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001731 }
1732 }
1733 if (klass->ShouldHaveEmbeddedVTable()) {
1734 for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
Orion Hodson5880c772020-07-28 20:12:08 +01001735 ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001736 }
1737 }
Vladimir Markoc524e9e2019-03-26 10:54:50 +00001738 ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
Andreas Gampe1c158a02017-07-13 17:26:19 -07001739 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
1740 if (iftable->GetMethodArrayCount(i) > 0) {
Orion Hodson5880c772020-07-28 20:12:08 +01001741 ic.CheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001742 }
1743 }
1744 }
1745 };
1746 heap->VisitObjects(visitor);
Andreas Gampe0793bec2016-12-01 11:37:33 -08001747 }
1748
Andreas Gampe0793bec2016-12-01 11:37:33 -08001749 private:
Orion Hodson5880c772020-07-28 20:12:08 +01001750 ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001751 : spaces_(heap->GetBootImageSpaces()),
1752 pointer_size_(class_linker->GetImagePointerSize()) {
1753 space_begin_.reserve(spaces_.size());
1754 method_sections_.reserve(spaces_.size());
1755 runtime_method_sections_.reserve(spaces_.size());
1756 for (gc::space::ImageSpace* space : spaces_) {
1757 space_begin_.push_back(space->Begin());
1758 auto& header = space->GetImageHeader();
1759 method_sections_.push_back(&header.GetMethodsSection());
1760 runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
1761 }
1762 }
1763
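  // Checks a single ArtMethod: runtime methods must have a null declaring class, copied
  // methods a non-null one, and otherwise the declaring class must equal `expected_class`
  // when one is given. If boot image spaces are present, the method must also lie within
  // one of their methods or runtime-methods sections.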
Orion Hodson5880c772020-07-28 20:12:08 +01001764 void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001765 REQUIRES_SHARED(Locks::mutator_lock_) {
1766 if (m->IsRuntimeMethod()) {
1767 ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
1768 CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
1769 } else if (m->IsCopied()) {
1770 CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
1771 } else if (expected_class != nullptr) {
1772 CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
1773 }
1774 if (!spaces_.empty()) {
1775 bool contains = false;
1776 for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
1777 const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
1778 contains = method_sections_[i]->Contains(offset) ||
1779 runtime_method_sections_[i]->Contains(offset);
1780 }
1781 CHECK(contains) << m << " not found";
1782 }
1783 }
1784
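  // Runs CheckArtMethod on every entry of a method pointer array (e.g. a vtable or an
  // interface method array); null entries are only tolerated when `expected_class` is null.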
Orion Hodson5880c772020-07-28 20:12:08 +01001785 void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
1786 ObjPtr<mirror::Class> expected_class)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001787 REQUIRES_SHARED(Locks::mutator_lock_) {
1788 CHECK(arr != nullptr);
1789 for (int32_t j = 0; j < arr->GetLength(); ++j) {
1790 auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
1791 // expected_class == null means we are a dex cache.
1792 if (expected_class != nullptr) {
1793 CHECK(method != nullptr);
1794 }
1795 if (method != nullptr) {
Orion Hodson5880c772020-07-28 20:12:08 +01001796 CheckArtMethod(method, expected_class);
Andreas Gampe0793bec2016-12-01 11:37:33 -08001797 }
1798 }
1799 }
1800
Andreas Gampe0793bec2016-12-01 11:37:33 -08001801 const std::vector<gc::space::ImageSpace*>& spaces_;
1802 const PointerSize pointer_size_;
1803
1804 // Cached sections from the spaces.
1805 std::vector<const uint8_t*> space_begin_;
1806 std::vector<const ImageSection*> method_sections_;
1807 std::vector<const ImageSection*> runtime_method_sections_;
1808};
1809
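// Debug-build check for an app image: every method whose declaring class is not in the boot
// image must be findable in the app image class table, and all direct interfaces of classes
// defined by the given class loader must already be resolved.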
Andreas Gampebe7af222017-07-25 09:57:28 -07001810static void VerifyAppImage(const ImageHeader& header,
1811 const Handle<mirror::ClassLoader>& class_loader,
David Srbecky86d6cd52020-12-02 18:13:10 +00001812 ClassTable* class_table,
1813 gc::space::ImageSpace* space)
Andreas Gampebe7af222017-07-25 09:57:28 -07001814 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001815 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1816 ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
1817 if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
1818 CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
1819 << mirror::Class::PrettyClass(klass);
1820 }
1821 }, space->Begin(), kRuntimePointerSize);
Andreas Gampebe7af222017-07-25 09:57:28 -07001822 {
1823 // Verify that all direct interfaces of classes in the class table are also resolved.
1824 std::vector<ObjPtr<mirror::Class>> classes;
1825 auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
1826 REQUIRES_SHARED(Locks::mutator_lock_) {
1827 if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
1828 classes.push_back(klass);
1829 }
1830 return true;
1831 };
1832 class_table->Visit(verify_direct_interfaces_in_table);
Andreas Gampebe7af222017-07-25 09:57:28 -07001833 for (ObjPtr<mirror::Class> klass : classes) {
1834 for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
Vladimir Markob10668c2021-06-10 09:52:53 +01001835 CHECK(klass->GetDirectInterface(i) != nullptr)
Andreas Gampebe7af222017-07-25 09:57:28 -07001836 << klass->PrettyDescriptor() << " iface #" << i;
1837 }
1838 }
1839 }
Andreas Gampebe7af222017-07-25 09:57:28 -07001840}
1841
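// AddImageSpace wires an image space (boot or app image) into the runtime: it validates the
// image header against the running configuration, opens and registers the dex files backed
// by the image's oat file, optionally rewrites method entry points (interpreter-only mode,
// nterp), and merges the image's class table into the class loader's table.
//
// A rough usage sketch (hypothetical caller, shown for illustration only):
//
//   std::vector<std::unique_ptr<const DexFile>> dex_files;
//   std::string error_msg;
//   if (!class_linker->AddImageSpace(space, loader_handle, &dex_files, &error_msg)) {
//     LOG(ERROR) << "Failed to add image space: " << error_msg;
//   }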
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001842bool ClassLinker::AddImageSpace(
1843 gc::space::ImageSpace* space,
1844 Handle<mirror::ClassLoader> class_loader,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001845 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1846 std::string* error_msg) {
1847 DCHECK(out_dex_files != nullptr);
1848 DCHECK(error_msg != nullptr);
1849 const uint64_t start_time = NanoTime();
Andreas Gampefa4333d2017-02-14 11:10:34 -08001850 const bool app_image = class_loader != nullptr;
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001851 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001852 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001853 DCHECK(dex_caches_object != nullptr);
1854 Runtime* const runtime = Runtime::Current();
1855 gc::Heap* const heap = runtime->GetHeap();
1856 Thread* const self = Thread::Current();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001857 // Check that the image is what we are expecting.
1858 if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
1859 *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
1860 static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
1861 image_pointer_size_);
1862 return false;
1863 }
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001864 size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
1865 if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
1866 *error_msg = StringPrintf("Expected %zu image roots but got %d",
1867 expected_image_roots,
1868 header.GetImageRoots()->GetLength());
1869 return false;
1870 }
1871 StackHandleScope<3> hs(self);
1872 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
1873 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
1874 Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
1875 header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001876 MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
Vladimir Markof75613c2018-06-05 12:51:04 +01001877 app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
1878 : nullptr));
Andreas Gampefa4333d2017-02-14 11:10:34 -08001879 DCHECK(class_roots != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001880 if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001881 *error_msg = StringPrintf("Expected %d class roots but got %d",
1882 class_roots->GetLength(),
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001883 static_cast<int32_t>(ClassRoot::kMax));
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001884 return false;
1885 }
1886 // Check against existing class roots to make sure they match the ones in the boot image.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001887 ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
1888 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
1889 if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001890 *error_msg = "App image class roots must have pointer equality with runtime ones.";
1891 return false;
1892 }
1893 }
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001894 const OatFile* oat_file = space->GetOatFile();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001895 if (oat_file->GetOatHeader().GetDexFileCount() !=
1896 static_cast<uint32_t>(dex_caches->GetLength())) {
1897 *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
1898 "image";
1899 return false;
1900 }
1901
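  // Bind each dex cache in the image to its dex file: open the dex file through the image's
  // oat file, allocate the cache's native arrays in the class loader's LinearAlloc, and, for
  // the boot image, also append the dex file to the boot class path.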
Alex Lighta9bbc082019-11-14 14:51:41 -08001902 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
David Brazdil3e8aae02019-03-26 18:48:02 +00001903 std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001904 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1905 dex_file_location.c_str(),
1906 error_msg);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001907 if (dex_file == nullptr) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001908 return false;
1909 }
1910
Orion Hodsonb9b7d912021-02-24 09:24:47 +00001911 LinearAlloc* linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader.Get());
1912 DCHECK(linear_alloc != nullptr);
1913 DCHECK_EQ(linear_alloc == Runtime::Current()->GetLinearAlloc(), !app_image);
David Srbecky86d6cd52020-12-02 18:13:10 +00001914 {
Orion Hodsonb9b7d912021-02-24 09:24:47 +00001915 // Native fields are all null. Initialize them and allocate native memory.
David Srbecky86d6cd52020-12-02 18:13:10 +00001916 WriterMutexLock mu(self, *Locks::dex_lock_);
Orion Hodsonb9b7d912021-02-24 09:24:47 +00001917 dex_cache->InitializeNativeFields(dex_file.get(), linear_alloc);
David Srbecky86d6cd52020-12-02 18:13:10 +00001918 }
1919 if (!app_image) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001920 // Register dex files, keep track of existing ones that are conflicts.
Mathieu Chartier0a19e212019-11-27 14:35:24 -08001921 AppendToBootClassPath(dex_file.get(), dex_cache);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001922 }
1923 out_dex_files->push_back(std::move(dex_file));
1924 }
1925
1926 if (app_image) {
1927 ScopedObjectAccessUnchecked soa(Thread::Current());
Nicolas Geoffrayf0d30022018-11-20 17:45:38 +00001928 ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
Vladimir Markoeca3eda2016-11-09 16:26:44 +00001929 if (IsBootClassLoader(soa, image_class_loader.Get())) {
1930 *error_msg = "Unexpected BootClassLoader in app image";
1931 return false;
1932 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001933 }
1934
Orion Hodson5880c772020-07-28 20:12:08 +01001935 if (kCheckImageObjects) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001936 if (!app_image) {
Orion Hodson5880c772020-07-28 20:12:08 +01001937 ImageChecker::CheckObjects(heap, this);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001938 }
1939 }
1940
1941 // Set entry point to interpreter if in InterpretOnly mode.
1942 if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001943 // Set image methods' entry point to interpreter.
1944 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1945 if (!method.IsRuntimeMethod()) {
1946 DCHECK(method.GetDeclaringClass() != nullptr);
Ulya Trafimovich5439f052020-07-29 10:03:46 +01001947 if (!method.IsNative() && !method.IsResolutionMethod()) {
1948 method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
1949 image_pointer_size_);
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001950 }
1951 }
1952 }, space->Begin(), image_pointer_size_);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001953 }
1954
Nicolas Geoffray47171752020-08-31 15:03:20 +01001955 if (!runtime->IsAotCompiler()) {
Nicolas Geoffraybd728b02021-01-27 13:21:35 +00001956 ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
Nicolas Geoffray47171752020-08-31 15:03:20 +01001957 bool can_use_nterp = interpreter::CanRuntimeUseNterp();
Nicolas Geoffray7e2c9632020-01-09 13:41:10 +00001958 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray47171752020-08-31 15:03:20 +01001959 // In the image, the `data` pointer field of the ArtMethod contains the code
1960 // item offset. Change this to the actual pointer to the code item.
1961 if (method.HasCodeItem()) {
1962 const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
1963 reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
Nicolas Geoffraye1d2dce2020-09-21 10:06:31 +01001964 method.SetCodeItem(code_item);
Nicolas Geoffray47171752020-08-31 15:03:20 +01001965 }
1966 // Set image methods' entry point that point to the interpreter bridge to the
1967 // nterp entry point.
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00001968 if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
1969 if (can_use_nterp) {
Nicolas Geoffrayb1cf8372021-02-02 13:32:20 +00001970 DCHECK(!NeedsClinitCheckBeforeCall(&method) ||
1971 method.GetDeclaringClass()->IsVisiblyInitialized());
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00001972 method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
1973 } else {
1974 method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
1975 }
Nicolas Geoffray47171752020-08-31 15:03:20 +01001976 }
Nicolas Geoffray7e2c9632020-01-09 13:41:10 +00001977 }, space->Begin(), image_pointer_size_);
1978 }
1979
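  // In verification soft-fail mode the interpreter has to re-run access checks, so drop the
  // skip-access-checks flag from invokable non-native methods in the image.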
Nicolas Geoffray8c41a0b2020-02-06 16:52:11 +00001980 if (runtime->IsVerificationSoftFail()) {
1981 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1982 if (!method.IsNative() && method.IsInvokable()) {
1983 method.ClearSkipAccessChecks();
1984 }
1985 }, space->Begin(), image_pointer_size_);
1986 }
1987
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08001988 ClassTable* class_table = nullptr;
1989 {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001990 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08001991 class_table = InsertClassTableForClassLoader(class_loader.Get());
Mathieu Chartier69731002016-03-02 16:08:31 -08001992 }
1993 // If we have a class table section, read it and use it for verification in
1994 // UpdateAppImageClassLoadersAndDexCaches.
1995 ClassTable::ClassSet temp_set;
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001996 const ImageSection& class_table_section = header.GetClassTableSection();
Mathieu Chartier69731002016-03-02 16:08:31 -08001997 const bool added_class_table = class_table_section.Size() > 0u;
1998 if (added_class_table) {
1999 const uint64_t start_time2 = NanoTime();
2000 size_t read_count = 0;
2001 temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2002 /*make copy*/false,
2003 &read_count);
Mathieu Chartier69731002016-03-02 16:08:31 -08002004 VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002005 }
2006 if (app_image) {
David Srbecky86d6cd52020-12-02 18:13:10 +00002007 AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
Mathieu Chartier456b4922018-11-06 10:35:48 -08002008
2009 {
2010 ScopedTrace trace("AppImage:UpdateClassLoaders");
2011 // Update class loader and resolved strings. If added_class_table is false, the resolved
2012      // strings were forwarded by UpdateAppImageClassLoadersAndDexCaches.
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002013 ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
Mathieu Chartier456b4922018-11-06 10:35:48 -08002014 for (const ClassTable::TableSlot& root : temp_set) {
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002015 // Note: We probably don't need the read barrier unless we copy the app image objects into
2016 // the region space.
2017 ObjPtr<mirror::Class> klass(root.Read());
2018 // Do not update class loader for boot image classes where the app image
2019 // class loader is only the initiating loader but not the defining loader.
2020 // Avoid read barrier since we are comparing against null.
2021 if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
Vladimir Markob68bb7a2020-03-17 10:55:25 +00002022 klass->SetClassLoader(loader);
Mathieu Chartierbc1e0fa2018-11-14 16:18:18 -08002023 }
Mathieu Chartier456b4922018-11-06 10:35:48 -08002024 }
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002025 }
Igor Murashkin86083f72017-10-27 10:59:04 -07002026
Vladimir Marko305c38b2018-02-14 11:50:07 +00002027 if (kBitstringSubtypeCheckEnabled) {
Igor Murashkin86083f72017-10-27 10:59:04 -07002028      // Every class in the app image initially has its SubtypeCheckInfo in the
2029 // Uninitialized state.
2030 //
2031 // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
2032      // after class initialization is complete. The app image class statuses as-is
2033      // are almost all ClassStatus::Initialized, and being in the
2034      // SubtypeCheckInfo::kUninitialized state violates that invariant.
2035 //
2036 // Force every app image class's SubtypeCheck to be at least kIninitialized.
2037      // Force every app image class's SubtypeCheck to be at least kInitialized.
2038 // See also ImageWriter::FixupClass.
Chris Wailes23866362018-08-22 16:16:58 -07002039      ScopedTrace trace("AppImage:RecalculateSubtypeCheckBitstrings");
Igor Murashkin86083f72017-10-27 10:59:04 -07002040 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2041 for (const ClassTable::TableSlot& root : temp_set) {
Vladimir Marko38b8b252018-01-02 19:07:06 +00002042 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
Igor Murashkin86083f72017-10-27 10:59:04 -07002043 }
2044 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00002045 }
2046 if (!oat_file->GetBssGcRoots().empty()) {
2047 // Insert oat file to class table for visiting .bss GC roots.
2048 class_table->InsertOatFile(oat_file);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002049 }
Igor Murashkin86083f72017-10-27 10:59:04 -07002050
Mathieu Chartier69731002016-03-02 16:08:31 -08002051 if (added_class_table) {
2052 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2053 class_table->AddClassSet(std::move(temp_set));
2054 }
Andreas Gampebe7af222017-07-25 09:57:28 -07002055
Mathieu Chartier69731002016-03-02 16:08:31 -08002056 if (kIsDebugBuild && app_image) {
2057    // This verification needs to happen after the classes have been added to the class loader,
2058    // since it ensures the classes are in the class table.
Chris Wailes23866362018-08-22 16:16:58 -07002059 ScopedTrace trace("AppImage:Verify");
David Srbecky86d6cd52020-12-02 18:13:10 +00002060 VerifyAppImage(header, class_loader, class_table, space);
Mathieu Chartier69731002016-03-02 16:08:31 -08002061 }
Andreas Gampebe7af222017-07-25 09:57:28 -07002062
Mathieu Chartierfbc31082016-01-24 11:59:56 -08002063 VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08002064 return true;
Brian Carlstroma663ea52011-08-19 23:33:41 -07002065}
2066
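// Visits class roots held by the class linker: the boot class table, class loader weak roots
// (when tracing is enabled or explicitly requested), and, for non-CC collectors, the new
// class and .bss roots recorded since root logging was enabled.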
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002067void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
Mathieu Chartier7778b882015-10-05 16:41:10 -07002068 // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
2069 // enabling tracing requires the mutator lock, there are no race conditions here.
2070 const bool tracing_enabled = Trace::IsTracingEnabled();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002071 Thread* const self = Thread::Current();
2072 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002073 if (kUseReadBarrier) {
2074 // We do not track new roots for CC.
2075 DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
2076 kVisitRootFlagClearRootLog |
2077 kVisitRootFlagStartLoggingNewRoots |
2078 kVisitRootFlagStopLoggingNewRoots));
2079 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002080 if ((flags & kVisitRootFlagAllRoots) != 0) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002081 // Argument for how root visiting deals with ArtField and ArtMethod roots.
2082    // There are 3 GC cases to handle:
2083    // Non-moving concurrent:
2084 // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
Mathieu Chartierda7c6502015-07-23 16:01:26 -07002085 // live by the class and class roots.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002086 //
2087 // Moving non-concurrent:
2088    // This case needs to call VisitNativeRoots in case the classes or dex cache arrays move.
2089    // To prevent missing roots, this case needs to ensure that there are no
2090    // suspend points between the point at which we allocate ArtMethod arrays and place them in a
2091 // class which is in the class table.
2092 //
2093 // Moving concurrent:
2094 // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
2095 // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
Mathieu Chartier58c3f6a2016-12-01 14:21:11 -08002096 //
2097 // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
2098 // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
2099 // these objects.
2100 UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
Andreas Gampe2af99022017-04-25 08:32:59 -07002101 boot_class_table_->VisitRoots(root_visitor);
Mathieu Chartier7778b882015-10-05 16:41:10 -07002102 // If tracing is enabled, then mark all the class loaders to prevent unloading.
neo.chaea2d1b282016-11-08 08:40:46 +09002103 if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
Mathieu Chartier7778b882015-10-05 16:41:10 -07002104 for (const ClassLoaderData& data : class_loaders_) {
2105 GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
2106 root.VisitRoot(visitor, RootInfo(kRootVMInternal));
2107 }
2108 }
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002109 } else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
Mathieu Chartierc2e20622014-11-03 11:41:47 -08002110 for (auto& root : new_class_roots_) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002111 ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002112 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002113 ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002114 // Concurrent moving GC marked new roots through the to-space invariant.
2115 CHECK_EQ(new_ref, old_ref);
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002116 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00002117 for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
2118 for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
2119 ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
2120 if (old_ref != nullptr) {
2121 DCHECK(old_ref->IsClass());
2122 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2123 ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
2124 // Concurrent moving GC marked new roots through the to-space invariant.
2125 CHECK_EQ(new_ref, old_ref);
2126 }
2127 }
2128 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002129 }
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002130 if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002131 new_class_roots_.clear();
Vladimir Marko1998cd02017-01-13 13:02:58 +00002132 new_bss_roots_boot_oat_files_.clear();
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002133 }
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002134 if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00002135 log_new_roots_ = true;
Vladimir Marko9b03cb42017-02-16 16:37:03 +00002136 } else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00002137 log_new_roots_ = false;
Mathieu Chartier52e4b432014-06-10 11:22:31 -07002138 }
2139 // We deliberately ignore the class roots in the image since we
2140 // handle image roots by using the MS/CMS rescanning of dirty cards.
2141}
2142
Brian Carlstroma663ea52011-08-19 23:33:41 -07002143// Keep in sync with InitCallback. Anything we visit, we need to
2144// reinit references to when reinitializing a ClassLinker from a
2145// mapped image.
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002146void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
Mathieu Chartier31000802015-06-14 14:14:37 -07002147 class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07002148 VisitClassRoots(visitor, flags);
Mathieu Chartier6cfc2c02015-10-12 15:06:16 -07002149  // Instead of visiting the find_array_class_cache_, drop it so that it doesn't prevent class
2150 // unloading if we are marking roots.
2151 DropFindArrayClassCache();
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07002152}
2153
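// Applies a ClassVisitor to the classes of every registered class loader, skipping classes
// for which the loader is only an initiating (not the defining) loader, and stopping early
// once the wrapped visitor asks to stop.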
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002154class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2155 public:
2156 explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2157 : visitor_(visitor),
2158 done_(false) {}
2159
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002160 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002161 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002162 ClassTable* const class_table = class_loader->GetClassTable();
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002163 if (!done_ && class_table != nullptr) {
2164 DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2165 if (!class_table->Visit(visitor)) {
2166 // If the visitor ClassTable returns false it means that we don't need to continue.
2167 done_ = true;
2168 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002169 }
2170 }
2171
2172 private:
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002173 // Class visitor that limits the class visits from a ClassTable to the classes with
2174 // the provided defining class loader. This filter is used to avoid multiple visits
2175 // of the same class which can be recorded for multiple initiating class loaders.
2176 class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2177 public:
2178 DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2179 ClassVisitor* visitor)
2180 : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2181
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002182 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002183 if (klass->GetClassLoader() != defining_class_loader_) {
2184 return true;
2185 }
2186 return (*visitor_)(klass);
2187 }
2188
Vladimir Marko0984e482019-03-27 16:41:41 +00002189 const ObjPtr<mirror::ClassLoader> defining_class_loader_;
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002190 ClassVisitor* const visitor_;
2191 };
2192
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002193 ClassVisitor* const visitor_;
2194 // If done is true then we don't need to do any more visiting.
2195 bool done_;
2196};
2197
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002198void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
Andreas Gampe2af99022017-04-25 08:32:59 -07002199 if (boot_class_table_->Visit(*visitor)) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002200 VisitClassLoaderClassesVisitor loader_visitor(visitor);
2201 VisitClassLoaders(&loader_visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002202 }
2203}
2204
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002205void ClassLinker::VisitClasses(ClassVisitor* visitor) {
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002206 Thread* const self = Thread::Current();
2207 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2208 // Not safe to have thread suspension when we are holding a lock.
2209 if (self != nullptr) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002210 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002211 VisitClassesInternal(visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002212 } else {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002213 VisitClassesInternal(visitor);
Elliott Hughesa2155262011-11-16 16:26:58 -08002214 }
2215}
2216
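// Visitor that simply accumulates every visited class into a vector. Used by
// VisitClassesWithoutClassesLock when classes cannot move.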
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002217class GetClassesInToVector : public ClassVisitor {
2218 public:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002219 bool operator()(ObjPtr<mirror::Class> klass) override {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002220 classes_.push_back(klass);
2221 return true;
2222 }
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002223 std::vector<ObjPtr<mirror::Class>> classes_;
Ian Rogersdbf3be02014-08-29 15:40:08 -07002224};
2225
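// Visitor that copies visited classes into a caller-allocated ObjectArray so they stay
// reachable across suspension points; Succeeded() reports whether the array was big enough.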
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002226class GetClassInToObjectArray : public ClassVisitor {
2227 public:
2228 explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2229 : arr_(arr), index_(0) {}
2230
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002231 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002232 ++index_;
2233 if (index_ <= arr_->GetLength()) {
2234 arr_->Set(index_ - 1, klass);
2235 return true;
2236 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002237 return false;
2238 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002239
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002240 bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002241 return index_ <= arr_->GetLength();
2242 }
2243
2244 private:
2245 mirror::ObjectArray<mirror::Class>* const arr_;
2246 int32_t index_;
2247};
2248
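// A minimal caller sketch (hypothetical visitor, for illustration only):
//
//   class CountingVisitor : public ClassVisitor {
//    public:
//     bool operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED) override
//         REQUIRES_SHARED(Locks::mutator_lock_) {
//       ++count_;
//       return true;  // Keep visiting.
//     }
//     size_t count_ = 0;
//   };
//   CountingVisitor counting_visitor;
//   class_linker->VisitClassesWithoutClassesLock(&counting_visitor);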
2249void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002250 // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
2251 // is avoiding duplicates.
2252 if (!kMovingClasses) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002253 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002254 GetClassesInToVector accumulator;
2255 VisitClasses(&accumulator);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002256 for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002257 if (!visitor->operator()(klass)) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002258 return;
2259 }
2260 }
2261 } else {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002262 Thread* const self = Thread::Current();
Ian Rogersdbf3be02014-08-29 15:40:08 -07002263 StackHandleScope<1> hs(self);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002264 auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
Ian Rogersdbf3be02014-08-29 15:40:08 -07002265 // We size the array assuming classes won't be added to the class table during the visit.
2266 // If this assumption fails we iterate again.
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002267 while (true) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002268 size_t class_table_size;
2269 {
Ian Rogers7b078e82014-09-10 14:44:24 -07002270 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002271 // Add 100 in case new classes get loaded when we are filling in the object array.
2272 class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
Ian Rogersdbf3be02014-08-29 15:40:08 -07002273 }
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002274 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Ian Rogersdbf3be02014-08-29 15:40:08 -07002275 classes.Assign(
2276 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
Andreas Gampefa4333d2017-02-14 11:10:34 -08002277 CHECK(classes != nullptr); // OOME.
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002278 GetClassInToObjectArray accumulator(classes.Get());
2279 VisitClasses(&accumulator);
2280 if (accumulator.Succeeded()) {
2281 break;
2282 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002283 }
2284 for (int32_t i = 0; i < classes->GetLength(); ++i) {
2285      // If the class table shrank during creation of the classes array we expect null elements. If
2286 // the class table grew then the loop repeats. If classes are created after the loop has
2287 // finished then we don't visit.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002288 ObjPtr<mirror::Class> klass = classes->Get(i);
Mathieu Chartier1aa8ec22016-02-01 10:34:47 -08002289 if (klass != nullptr && !visitor->operator()(klass)) {
Ian Rogersdbf3be02014-08-29 15:40:08 -07002290 return;
2291 }
Ian Rogers00f7d0e2012-07-19 15:28:27 -07002292 }
2293 }
2294}
2295
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07002296ClassLinker::~ClassLinker() {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002297 Thread* const self = Thread::Current();
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07002298 for (const ClassLoaderData& data : class_loaders_) {
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002299 // CHA unloading analysis is not needed. No negative consequences are expected because
2300 // all the classloaders are deleted at the same time.
Andreas Gampe98ea9d92018-10-19 14:06:15 -07002301 DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
Mathieu Chartier6b069532015-08-05 15:08:12 -07002302 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002303 class_loaders_.clear();
Vladimir Markobf121912019-06-04 13:49:05 +01002304 while (!running_visibly_initialized_callbacks_.empty()) {
2305 std::unique_ptr<VisiblyInitializedCallback> callback(
2306 std::addressof(running_visibly_initialized_callbacks_.front()));
2307 running_visibly_initialized_callbacks_.pop_front();
2308 }
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07002309}
2310
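// Tears down the native state of an unloaded class loader: releases the weak global root,
// removes JIT code and CHA dependencies tied to its LinearAlloc, drops matching entries from
// the critical-native clinit-check map, and finally deletes the allocator and class table.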
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002311void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002312 Runtime* const runtime = Runtime::Current();
2313 JavaVMExt* const vm = runtime->GetJavaVM();
2314 vm->DeleteWeakGlobalRef(self, data.weak_root);
Calin Juravlee5de54c2016-04-20 14:22:09 +01002315 // Notify the JIT that we need to remove the methods and/or profiling info.
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002316 if (runtime->GetJit() != nullptr) {
2317 jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
2318 if (code_cache != nullptr) {
Mathieu Chartiercf79cf52017-07-21 11:17:57 -07002319 // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002320 code_cache->RemoveMethodsIn(self, *data.allocator);
2321 }
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07002322 } else if (cha_ != nullptr) {
Mathieu Chartiercf79cf52017-07-21 11:17:57 -07002323    // If we don't have a JIT, we need to remove the CHA dependencies manually.
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07002324 cha_->RemoveDependenciesForLinearAlloc(data.allocator);
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002325 }
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002326 // Cleanup references to single implementation ArtMethods that will be deleted.
2327 if (cleanup_cha) {
2328 CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
2329 data.class_table->Visit<CHAOnDeleteUpdateClassVisitor, kWithoutReadBarrier>(visitor);
2330 }
Vladimir Marko86c87522020-05-11 16:55:55 +01002331 {
2332 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
2333 auto end = critical_native_code_with_clinit_check_.end();
2334 for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
2335 if (data.allocator->ContainsUnsafe(it->first)) {
2336 it = critical_native_code_with_clinit_check_.erase(it);
2337 } else {
2338 ++it;
2339 }
2340 }
2341 }
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03002342
Nicolas Geoffray1dad3f62015-10-23 14:59:54 +01002343 delete data.allocator;
2344 delete data.class_table;
2345}
2346
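// Pointer arrays (e.g. vtables storing ArtMethod*) are backed by a LongArray on 64-bit
// targets and an IntArray on 32-bit targets, matching the image pointer size.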
Vladimir Markobcf17522018-06-01 13:14:32 +01002347ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2348 return ObjPtr<mirror::PointerArray>::DownCast(
Andreas Gampe542451c2016-07-26 09:02:02 -07002349 image_pointer_size_ == PointerSize::k64
Vladimir Markobcf17522018-06-01 13:14:32 +01002350 ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2351 : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
Mathieu Chartiere401d142015-04-22 13:56:20 -07002352}
2353
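// Allocates the managed DexCache object and gives it a weakly interned location string; the
// native arrays are initialized separately (see AllocAndInitializeDexCache).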
David Srbecky86d6cd52020-12-02 18:13:10 +00002354ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002355 StackHandleScope<1> hs(self);
Mathieu Chartier28bd2e42016-10-04 13:54:57 -07002356 auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002357 GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
Andreas Gampefa4333d2017-02-14 11:10:34 -08002358 if (dex_cache == nullptr) {
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002359 self->AssertPendingOOMException();
2360 return nullptr;
2361 }
Vladimir Marko31c3daa2019-06-13 12:18:37 +01002362 // Use InternWeak() so that the location String can be collected when the ClassLoader
2363 // with this DexCache is collected.
2364 ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002365 if (location == nullptr) {
2366 self->AssertPendingOOMException();
2367 return nullptr;
2368 }
David Srbecky86d6cd52020-12-02 18:13:10 +00002369 dex_cache->SetLocation(location);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002370 return dex_cache.Get();
Brian Carlstroma0808032011-07-18 00:39:23 -07002371}
2372
Orion Hodsonb9b7d912021-02-24 09:24:47 +00002373ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self,
2374 const DexFile& dex_file,
2375 LinearAlloc* linear_alloc) {
David Srbecky86d6cd52020-12-02 18:13:10 +00002376 ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002377 if (dex_cache != nullptr) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08002378 WriterMutexLock mu(self, *Locks::dex_lock_);
Orion Hodsonb9b7d912021-02-24 09:24:47 +00002379 dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002380 }
Vladimir Markobcf17522018-06-01 13:14:32 +01002381 return dex_cache;
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002382}
2383
Vladimir Marko70e2a762019-07-12 16:49:00 +01002384template <bool kMovable, typename PreFenceVisitor>
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002385ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2386 ObjPtr<mirror::Class> java_lang_Class,
Vladimir Marko70e2a762019-07-12 16:49:00 +01002387 uint32_t class_size,
2388 const PreFenceVisitor& pre_fence_visitor) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08002389 DCHECK_GE(class_size, sizeof(mirror::Class));
Ian Rogers1d54e732013-05-02 21:10:01 -07002390 gc::Heap* heap = Runtime::Current()->GetHeap();
Roland Levillain0e840272018-08-23 19:55:30 +01002391 ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
Vladimir Marko70e2a762019-07-12 16:49:00 +01002392 heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2393 heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
Ian Rogers6fac4472014-02-25 17:01:10 -08002394 if (UNLIKELY(k == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002395 self->AssertPendingOOMException();
Ian Rogers6fac4472014-02-25 17:01:10 -08002396 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07002397 }
Ian Rogers6fac4472014-02-25 17:01:10 -08002398 return k->AsClass();
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07002399}
2400
Vladimir Marko70e2a762019-07-12 16:49:00 +01002401template <bool kMovable>
2402ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2403 ObjPtr<mirror::Class> java_lang_Class,
2404 uint32_t class_size) {
2405 mirror::Class::InitializeClassVisitor visitor(class_size);
2406 return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
2407}
2408
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002409ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002410 return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
Brian Carlstroma0808032011-07-18 00:39:23 -07002411}
2412
Vladimir Marko70e2a762019-07-12 16:49:00 +01002413void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
2414 ClassRoot primitive_root,
2415 ClassRoot array_root) {
Roland Levillain0e840272018-08-23 19:55:30 +01002416 // We make this class non-movable for the unlikely case where it were to be
2417 // moved by a sticky-bit (minor) collection when using the Generational
2418 // Concurrent Copying (CC) collector, potentially creating a stale reference
2419 // in the `klass_` field of one of its instances allocated in the Large-Object
2420 // Space (LOS) -- see the comment about the dirty card scanning logic in
2421 // art::gc::collector::ConcurrentCopying::MarkingPhase.
Vladimir Marko70e2a762019-07-12 16:49:00 +01002422 ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
2423 self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
2424 ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
2425 DCHECK(component_type->IsPrimitive());
2426 array_class->SetComponentType(component_type);
2427 SetClassRoot(array_root, array_class);
2428}
2429
2430void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
2431 ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
2432 array_class->SetSuperClass(java_lang_Object);
2433 array_class->SetVTable(java_lang_Object->GetVTable());
2434 array_class->SetPrimitiveType(Primitive::kPrimNot);
2435 ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
2436 array_class->SetClassFlags(component_type->IsPrimitive()
2437 ? mirror::kClassFlagNoReferenceFields
2438 : mirror::kClassFlagObjectArray);
2439 array_class->SetClassLoader(component_type->GetClassLoader());
2440 array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
2441 array_class->PopulateEmbeddedVTable(image_pointer_size_);
2442 ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
2443 array_class->SetImt(object_imt, image_pointer_size_);
2444 // Skip EnsureSkipAccessChecksMethods(). We can skip the verified status,
2445 // the kAccVerificationAttempted flag is added below, and there are no
2446 // methods that need the kAccSkipAccessChecks flag.
2447 DCHECK_EQ(array_class->NumMethods(), 0u);
2448
2449  // We don't need to call array_class->SetObjectSize(..)
2450  // because Object::SizeOf delegates to Array::SizeOf.
2451
2452 // All arrays have java/lang/Cloneable and java/io/Serializable as
2453 // interfaces. We need to set that up here, so that stuff like
2454 // "instanceof" works right.
2455
2456 // Use the single, global copies of "interfaces" and "iftable"
2457 // (remember not to free them for arrays).
2458 {
2459 ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
2460 CHECK(array_iftable != nullptr);
2461 array_class->SetIfTable(array_iftable);
2462 }
2463
2464 // Inherit access flags from the component type.
2465 int access_flags = component_type->GetAccessFlags();
2466 // Lose any implementation detail flags; in particular, arrays aren't finalizable.
2467 access_flags &= kAccJavaFlagsMask;
2468 // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
2469 // and remove "interface".
2470 access_flags |= kAccAbstract | kAccFinal;
2471 access_flags &= ~kAccInterface;
2472 // Arrays are access-checks-clean and preverified.
2473 access_flags |= kAccVerificationAttempted;
2474
Vladimir Markob68bb7a2020-03-17 10:55:25 +00002475 array_class->SetAccessFlagsDuringLinking(access_flags);
Vladimir Marko70e2a762019-07-12 16:49:00 +01002476
Vladimir Markobf121912019-06-04 13:49:05 +01002477 // Array classes are fully initialized either during single threaded startup,
2478 // or from a pre-fence visitor, so visibly initialized.
2479 array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
Vladimir Marko70e2a762019-07-12 16:49:00 +01002480}
2481
2482void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
2483 // Do not hold lock on the array class object, the initialization of
2484 // core array classes is done while the process is still single threaded.
2485 ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
2486 FinishArrayClassSetup(array_class);
2487
2488 std::string temp;
2489 const char* descriptor = array_class->GetDescriptor(&temp);
2490 size_t hash = ComputeModifiedUtf8Hash(descriptor);
2491 ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
2492 CHECK(existing == nullptr);
Roland Levillain0e840272018-08-23 19:55:30 +01002493}
2494
Vladimir Markobcf17522018-06-01 13:14:32 +01002495ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07002496 Thread* self,
2497 size_t length) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07002498 return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002499 self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
Shih-wei Liao55df06b2011-08-26 14:39:27 -07002500}
2501
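// Waits for `klass` to become resolved (or retired, for temporary classes), first yielding
// and then sleeping between checks. Returns null with a pending exception if the class ends
// up erroneous or a class circularity involving the calling thread is detected.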
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002502ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
2503 const char* descriptor,
2504 ObjPtr<mirror::Class> klass) {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07002505 DCHECK(klass != nullptr);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002506 if (kIsDebugBuild) {
2507 StackHandleScope<1> hs(self);
2508 HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
2509 Thread::PoisonObjectPointersIfDebug();
2510 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002511
2512 // For temporary classes we must wait for them to be retired.
2513 if (init_done_ && klass->IsTemp()) {
2514 CHECK(!klass->IsResolved());
Vladimir Marko72ab6842017-01-20 19:32:50 +00002515 if (klass->IsErroneousUnresolved()) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002516 ThrowEarlierClassFailure(klass);
2517 return nullptr;
2518 }
2519 StackHandleScope<1> hs(self);
2520 Handle<mirror::Class> h_class(hs.NewHandle(klass));
2521 ObjectLock<mirror::Class> lock(self, h_class);
2522 // Loop and wait for the resolving thread to retire this class.
Vladimir Marko72ab6842017-01-20 19:32:50 +00002523 while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002524 lock.WaitIgnoringInterrupts();
2525 }
Vladimir Marko72ab6842017-01-20 19:32:50 +00002526 if (h_class->IsErroneousUnresolved()) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002527 ThrowEarlierClassFailure(h_class.Get());
2528 return nullptr;
2529 }
2530 CHECK(h_class->IsRetired());
2531 // Get the updated class from class table.
Andreas Gampe34ee6842014-12-02 15:43:52 -08002532 klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002533 }
2534
Brian Carlstromaded5f72011-10-07 17:15:04 -07002535 // Wait for the class if it has not already been linked.
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002536 size_t index = 0;
2537 // Maximum number of yield iterations until we start sleeping.
2538 static const size_t kNumYieldIterations = 1000;
2539 // How long each sleep is in us.
2540 static const size_t kSleepDurationUS = 1000; // 1 ms.
Vladimir Marko72ab6842017-01-20 19:32:50 +00002541 while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002542 StackHandleScope<1> hs(self);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002543 HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002544 {
2545 ObjectTryLock<mirror::Class> lock(self, h_class);
2546      // Cannot use a monitor wait here since it may block when returning and deadlock if another
2547 // thread has locked klass.
2548 if (lock.Acquired()) {
2549 // Check for circular dependencies between classes, the lock is required for SetStatus.
2550 if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
2551 ThrowClassCircularityError(h_class.Get());
Vladimir Marko2c64a832018-01-04 11:31:56 +00002552 mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002553 return nullptr;
2554 }
2555 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002556 }
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002557 {
2558 // Handle wrapper deals with klass moving.
2559 ScopedThreadSuspension sts(self, kSuspended);
2560 if (index < kNumYieldIterations) {
2561 sched_yield();
2562 } else {
2563 usleep(kSleepDurationUS);
2564 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002565 }
Mathieu Chartier4b0ef1c2016-07-29 16:26:01 -07002566 ++index;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002567 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002568
Vladimir Marko72ab6842017-01-20 19:32:50 +00002569 if (klass->IsErroneousUnresolved()) {
Elliott Hughes4a2b4172011-09-20 17:08:25 -07002570 ThrowEarlierClassFailure(klass);
Mathieu Chartierc528dba2013-11-26 12:00:11 -08002571 return nullptr;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07002572 }
2573 // Return the loaded class. No exceptions should be pending.
David Sehr709b0702016-10-13 09:12:37 -07002574 CHECK(klass->IsResolved()) << klass->PrettyClass();
Ian Rogers62d6c772013-02-27 08:32:07 -08002575 self->AssertNoPendingException();
Vladimir Markobcf17522018-06-01 13:14:32 +01002576 return klass;
Brian Carlstromaded5f72011-10-07 17:15:04 -07002577}
2578
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002579using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
Ian Rogers68b56852014-08-29 20:19:11 -07002580
2581// Search a collection of DexFiles for a descriptor
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002582ClassPathEntry FindInClassPath(const char* descriptor,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07002583 size_t hash, const std::vector<const DexFile*>& class_path) {
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002584 for (const DexFile* dex_file : class_path) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08002585 DCHECK(dex_file != nullptr);
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002586 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07002587 if (dex_class_def != nullptr) {
Ian Rogers68b56852014-08-29 20:19:11 -07002588 return ClassPathEntry(dex_file, dex_class_def);
2589 }
2590 }
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002591 return ClassPathEntry(nullptr, nullptr);
Ian Rogers68b56852014-08-29 20:19:11 -07002592}
2593
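// Searches the shared library loaders attached to a BaseDexClassLoader, if any. Returns
// false if one of them is an unsupported loader type; otherwise returns true and stores the
// found class (or null) in *result.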
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002594bool ClassLinker::FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable& soa,
2595 Thread* self,
2596 const char* descriptor,
2597 size_t hash,
2598 Handle<mirror::ClassLoader> class_loader,
2599 /*out*/ ObjPtr<mirror::Class>* result) {
2600 ArtField* field =
2601 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
2602 ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
2603 if (raw_shared_libraries == nullptr) {
2604 return true;
2605 }
2606
2607 StackHandleScope<2> hs(self);
2608 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
2609 hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
2610 MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
Alex Lighta9bbc082019-11-14 14:51:41 -08002611 for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
2612 temp_loader.Assign(loader);
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002613 if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, temp_loader, result)) {
2614 return false; // One of the shared libraries is not supported.
2615 }
2616 if (*result != nullptr) {
2617 return true; // Found the class up the chain.
2618 }
2619 }
2620 return true;
2621}
2622
Nicolas Geoffray7d8d8ff2016-11-02 12:38:05 +00002623bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
2624 Thread* self,
2625 const char* descriptor,
2626 size_t hash,
2627 Handle<mirror::ClassLoader> class_loader,
Vladimir Markobcf17522018-06-01 13:14:32 +01002628 /*out*/ ObjPtr<mirror::Class>* result) {
Calin Juravlecdd49122017-07-05 20:09:53 -07002629 // Termination case: boot class loader.
Andreas Gampef865ea92015-04-13 22:14:19 -07002630 if (IsBootClassLoader(soa, class_loader.Get())) {
Calin Juravle415dc3d2017-06-28 11:03:12 -07002631 *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
Andreas Gampef865ea92015-04-13 22:14:19 -07002632 return true;
2633 }
2634
David Brazdil05909d82018-12-06 16:25:16 +00002635 if (IsPathOrDexClassLoader(soa, class_loader) || IsInMemoryDexClassLoader(soa, class_loader)) {
Calin Juravlecdd49122017-07-05 20:09:53 -07002636 // For regular path or dex class loader the search order is:
2637 // - parent
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002638 // - shared libraries
Calin Juravlecdd49122017-07-05 20:09:53 -07002639 // - class loader dex files
Andreas Gampef865ea92015-04-13 22:14:19 -07002640
Calin Juravlecdd49122017-07-05 20:09:53 -07002641 // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2642 StackHandleScope<1> hs(self);
2643 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2644 if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result)) {
2645 return false; // One of the parents is not supported.
2646 }
2647 if (*result != nullptr) {
2648 return true; // Found the class up the chain.
2649 }
Andreas Gampef865ea92015-04-13 22:14:19 -07002650
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002651 if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2652      return false;  // One of the shared library loaders is not supported.
2653 }
2654 if (*result != nullptr) {
2655 return true; // Found the class in a shared library.
2656 }
2657
Calin Juravlecdd49122017-07-05 20:09:53 -07002658 // Search the current class loader classpath.
2659 *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
Andreas Gampe501c3b02019-04-17 21:54:27 +00002660 return !soa.Self()->IsExceptionPending();
Andreas Gampef865ea92015-04-13 22:14:19 -07002661 }
2662
Calin Juravlecdd49122017-07-05 20:09:53 -07002663 if (IsDelegateLastClassLoader(soa, class_loader)) {
2664 // For delegate last, the search order is:
2665 // - boot class path
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002666 // - shared libraries
Calin Juravlecdd49122017-07-05 20:09:53 -07002667 // - class loader dex files
2668 // - parent
2669 *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
2670 if (*result != nullptr) {
2671 return true; // The class is part of the boot class path.
2672 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00002673 if (self->IsExceptionPending()) {
2674 // Pending exception means there was an error other than ClassNotFound that must be returned
2675 // to the caller.
2676 return false;
2677 }
Calin Juravlecdd49122017-07-05 20:09:53 -07002678
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002679 if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2680      return false;  // One of the shared library loaders is not supported.
2681 }
2682 if (*result != nullptr) {
2683 return true; // Found the class in a shared library.
2684 }
2685
Calin Juravlecdd49122017-07-05 20:09:53 -07002686 *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
2687 if (*result != nullptr) {
2688 return true; // Found the class in the current class loader
2689 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00002690 if (self->IsExceptionPending()) {
2691 // Pending exception means there was an error other than ClassNotFound that must be returned
2692 // to the caller.
2693 return false;
2694 }
Calin Juravlecdd49122017-07-05 20:09:53 -07002695
2696 // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2697 StackHandleScope<1> hs(self);
2698 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2699 return FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result);
2700 }
2701
2702 // Unsupported class loader.
2703 *result = nullptr;
2704 return false;
Calin Juravle415dc3d2017-06-28 11:03:12 -07002705}
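// Illustrative sketch (assumed caller, mirroring how FindClass below consumes this helper; not
// additional functionality): the return value and out-parameter combine into three outcomes:
//
//   ObjPtr<mirror::Class> found = nullptr;
//   if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &found)) {
//     // Unsupported loader chain, or an error other than ClassNotFound is pending; the caller
//     // must bail out or fall back to the Java-side ClassLoader.loadClass().
//   } else if (found != nullptr) {
//     // The chain was understood and the class was resolved.
//   } else {
//     // The chain was understood but the class was not found here.
//   }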
2706
Andreas Gampe501c3b02019-04-17 21:54:27 +00002707namespace {
2708
2709// Matches exceptions caught in DexFile.defineClass.
2710ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
2711 ClassLinker* class_linker)
2712 REQUIRES_SHARED(Locks::mutator_lock_) {
2713 return
2714 // ClassNotFoundException.
2715 throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
2716 class_linker))
2717 ||
2718 // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
2719 throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
2720}
2721
2722// Clear exceptions caught in DexFile.defineClass.
2723ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
2724 REQUIRES_SHARED(Locks::mutator_lock_) {
2725 if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
2726 self->ClearException();
2727 }
2728}
2729
2730} // namespace
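// Behavior sketch for the helpers above (descriptive only): after a failed DefineClass,
// FilterDexFileCaughtExceptions() clears a pending ClassNotFoundException or the pre-allocated
// NoClassDefFoundError so that lookup can continue at the next location, while any other pending
// throwable (e.g. an OutOfMemoryError) is preserved and surfaces to the caller, e.g.:
//
//   FilterDexFileCaughtExceptions(self, this);
//   if (!self->IsExceptionPending()) {
//     // Only ClassNotFound-style errors were pending; keep searching other class path entries.
//   }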
2731
Calin Juravle415dc3d2017-06-28 11:03:12 -07002732// Finds the class in the boot class loader.
2733// If the class is found, the method returns the resolved class. Otherwise it returns null.
2734ObjPtr<mirror::Class> ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
2735 const char* descriptor,
2736 size_t hash) {
2737 ObjPtr<mirror::Class> result = nullptr;
2738 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
2739 if (pair.second != nullptr) {
2740 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
2741 if (klass != nullptr) {
2742 result = EnsureResolved(self, descriptor, klass);
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002743 } else {
Calin Juravle415dc3d2017-06-28 11:03:12 -07002744 result = DefineClass(self,
2745 descriptor,
2746 hash,
2747 ScopedNullHandle<mirror::ClassLoader>(),
2748 *pair.first,
2749 *pair.second);
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002750 }
Calin Juravle415dc3d2017-06-28 11:03:12 -07002751 if (result == nullptr) {
2752 CHECK(self->IsExceptionPending()) << descriptor;
Andreas Gampe501c3b02019-04-17 21:54:27 +00002753 FilterDexFileCaughtExceptions(self, this);
Andreas Gampef865ea92015-04-13 22:14:19 -07002754 }
2755 }
Calin Juravle415dc3d2017-06-28 11:03:12 -07002756 return result;
2757}
Andreas Gampef865ea92015-04-13 22:14:19 -07002758
Calin Juravle415dc3d2017-06-28 11:03:12 -07002759ObjPtr<mirror::Class> ClassLinker::FindClassInBaseDexClassLoaderClassPath(
2760 ScopedObjectAccessAlreadyRunnable& soa,
2761 const char* descriptor,
2762 size_t hash,
2763 Handle<mirror::ClassLoader> class_loader) {
David Brazdil05909d82018-12-06 16:25:16 +00002764 DCHECK(IsPathOrDexClassLoader(soa, class_loader) ||
2765 IsInMemoryDexClassLoader(soa, class_loader) ||
2766 IsDelegateLastClassLoader(soa, class_loader))
Calin Juravle415dc3d2017-06-28 11:03:12 -07002767 << "Unexpected class loader for descriptor " << descriptor;
Andreas Gampef865ea92015-04-13 22:14:19 -07002768
Vladimir Marko68c07582021-04-19 16:01:15 +00002769 const DexFile* dex_file = nullptr;
2770 const dex::ClassDef* class_def = nullptr;
Andreas Gampeb8e7c372018-02-20 18:24:55 -08002771 ObjPtr<mirror::Class> ret;
Vladimir Marko68c07582021-04-19 16:01:15 +00002772 auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
2773 const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
2774 if (cp_class_def != nullptr) {
2775 dex_file = cp_dex_file;
2776 class_def = cp_class_def;
2777 return false; // Found a class definition, stop visit.
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002778 }
Andreas Gampeb8e7c372018-02-20 18:24:55 -08002779 return true; // Continue with the next DexFile.
2780 };
Vladimir Marko68c07582021-04-19 16:01:15 +00002781 VisitClassLoaderDexFiles(soa, class_loader, find_class_def);
Andreas Gampeb8e7c372018-02-20 18:24:55 -08002782
Vladimir Marko68c07582021-04-19 16:01:15 +00002783 ObjPtr<mirror::Class> klass = nullptr;
2784 if (class_def != nullptr) {
2785 klass = DefineClass(soa.Self(), descriptor, hash, class_loader, *dex_file, *class_def);
2786 if (UNLIKELY(klass == nullptr)) {
2787 CHECK(soa.Self()->IsExceptionPending()) << descriptor;
2788 FilterDexFileCaughtExceptions(soa.Self(), this);
2789 } else {
2790 DCHECK(!soa.Self()->IsExceptionPending());
2791 }
2792 }
2793 return klass;
Mathieu Chartierab0ed822014-09-11 14:21:41 -07002794}
2795
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002796ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
2797 const char* descriptor,
2798 Handle<mirror::ClassLoader> class_loader) {
Elliott Hughesba8eee12012-01-24 20:25:24 -08002799 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
Ian Rogers98379392014-02-24 16:53:16 -08002800 DCHECK(self != nullptr);
Ian Rogers00f7d0e2012-07-19 15:28:27 -07002801 self->AssertNoPendingException();
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07002802 self->PoisonObjectPointers(); // For DefineClass, CreateArrayClass, etc...
Elliott Hughesc3b77c72011-12-15 20:56:48 -08002803 if (descriptor[1] == '\0') {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08002804    // Only the descriptors of primitive types should be 1 character long; also avoid class lookup
2805 // for primitive classes that aren't backed by dex files.
2806 return FindPrimitiveClass(descriptor[0]);
2807 }
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002808 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Brian Carlstromaded5f72011-10-07 17:15:04 -07002809 // Find the class in the loaded classes table.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002810 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
Ian Rogers68b56852014-08-29 20:19:11 -07002811 if (klass != nullptr) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07002812 return EnsureResolved(self, descriptor, klass);
Brian Carlstromaded5f72011-10-07 17:15:04 -07002813 }
Brian Carlstromaded5f72011-10-07 17:15:04 -07002814 // Class is not yet loaded.
Andreas Gampefa4333d2017-02-14 11:10:34 -08002815 if (descriptor[0] != '[' && class_loader == nullptr) {
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00002816 // Non-array class and the boot class loader, search the boot class path.
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002817 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
Ian Rogers68b56852014-08-29 20:19:11 -07002818 if (pair.second != nullptr) {
Mathieu Chartier9865bde2015-12-21 09:58:16 -08002819 return DefineClass(self,
2820 descriptor,
2821 hash,
2822 ScopedNullHandle<mirror::ClassLoader>(),
2823 *pair.first,
Ian Rogers7b078e82014-09-10 14:44:24 -07002824 *pair.second);
Ian Rogers63557452014-06-04 16:57:15 -07002825 } else {
2826 // The boot class loader is searched ahead of the application class loader, failures are
2827      // The boot class loader is searched ahead of the application class loader; failures are
2828 // trigger the chaining with a proper stack trace.
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00002829 ObjPtr<mirror::Throwable> pre_allocated =
2830 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
Nicolas Geoffray14691c52015-03-05 10:40:17 +00002831 self->SetException(pre_allocated);
Ian Rogers63557452014-06-04 16:57:15 -07002832 return nullptr;
Jesse Wilson47daf872011-11-23 11:42:45 -05002833 }
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00002834 }
2835 ObjPtr<mirror::Class> result_ptr;
2836 bool descriptor_equals;
2837 if (descriptor[0] == '[') {
2838 result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
2839 DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
2840 DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
2841 descriptor_equals = true;
Jesse Wilson47daf872011-11-23 11:42:45 -05002842 } else {
Ian Rogers98379392014-02-24 16:53:16 -08002843 ScopedObjectAccessUnchecked soa(self);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002844 bool known_hierarchy =
2845 FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &result_ptr);
2846 if (result_ptr != nullptr) {
2847 // The chain was understood and we found the class. We still need to add the class to
2848 // the class table to protect from racy programs that can try and redefine the path list
2849 // which would change the Class<?> returned for subsequent evaluation of const-class.
2850 DCHECK(known_hierarchy);
2851 DCHECK(result_ptr->DescriptorEquals(descriptor));
2852 descriptor_equals = true;
Andreas Gampe501c3b02019-04-17 21:54:27 +00002853 } else if (!self->IsExceptionPending()) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002854 // Either the chain wasn't understood or the class wasn't found.
Andreas Gampe501c3b02019-04-17 21:54:27 +00002855      // If there is a pending exception we didn't clear, it is not a ClassNotFoundException and
2856 // we should return it instead of silently clearing and retrying.
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002857 //
2858 // If the chain was understood but we did not find the class, let the Java-side
2859 // rediscover all this and throw the exception with the right stack trace. Note that
2860 // the Java-side could still succeed for racy programs if another thread is actively
2861 // modifying the class loader's path list.
Andreas Gampef865ea92015-04-13 22:14:19 -07002862
Alex Light185a4612018-10-04 15:54:25 -07002863      // The runtime is not allowed to call into java from a runtime-thread, so just fail the load.
Alex Lighte9f61032018-09-24 16:04:51 -07002864 if (self->IsRuntimeThread()) {
Calin Juravleccd56952016-12-15 17:57:38 +00002865 // Oops, we can't call into java so we can't run actual class-loader code.
2866        // This is true, e.g., for the compiler (jit or aot).
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002867 ObjPtr<mirror::Throwable> pre_allocated =
2868 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
2869 self->SetException(pre_allocated);
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00002870 return nullptr;
2871 }
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002872
Vladimir Marko5fdd7782017-04-20 11:26:03 +01002873 // Inlined DescriptorToDot(descriptor) with extra validation.
2874 //
2875 // Throw NoClassDefFoundError early rather than potentially load a class only to fail
2876 // the DescriptorEquals() check below and give a confusing error message. For example,
2877 // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
2878 // instead of "Ljava/lang/String;", the message below using the "dot" names would be
2879 // "class loader [...] returned class java.lang.String instead of java.lang.String".
2880 size_t descriptor_length = strlen(descriptor);
2881 if (UNLIKELY(descriptor[0] != 'L') ||
2882 UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
2883 UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
2884 ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
2885 return nullptr;
2886 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00002887
Vladimir Marko5fdd7782017-04-20 11:26:03 +01002888 std::string class_name_string(descriptor + 1, descriptor_length - 2);
2889 std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
Andreas Gampe87658f32019-04-18 18:39:02 +00002890 if (known_hierarchy &&
2891 fast_class_not_found_exceptions_ &&
2892 !Runtime::Current()->IsJavaDebuggable()) {
2893 // For known hierarchy, we know that the class is going to throw an exception. If we aren't
2894 // debuggable, optimize this path by throwing directly here without going back to Java
2895 // language. This reduces how many ClassNotFoundExceptions happen.
2896 self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
2897 "%s",
2898 class_name_string.c_str());
2899 } else {
2900 ScopedLocalRef<jobject> class_loader_object(
2901 soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
2902 ScopedLocalRef<jobject> result(soa.Env(), nullptr);
2903 {
2904 ScopedThreadStateChange tsc(self, kNative);
2905 ScopedLocalRef<jobject> class_name_object(
2906 soa.Env(), soa.Env()->NewStringUTF(class_name_string.c_str()));
2907 if (class_name_object.get() == nullptr) {
2908 DCHECK(self->IsExceptionPending()); // OOME.
2909 return nullptr;
2910 }
2911 CHECK(class_loader_object.get() != nullptr);
2912 result.reset(soa.Env()->CallObjectMethod(class_loader_object.get(),
2913 WellKnownClasses::java_lang_ClassLoader_loadClass,
2914 class_name_object.get()));
2915 }
2916 if (result.get() == nullptr && !self->IsExceptionPending()) {
2917 // broken loader - throw NPE to be compatible with Dalvik
2918 ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
2919 class_name_string.c_str()).c_str());
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002920 return nullptr;
2921 }
Andreas Gampe87658f32019-04-18 18:39:02 +00002922 result_ptr = soa.Decode<mirror::Class>(result.get());
2923 // Check the name of the returned class.
2924 descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002925 }
Andreas Gampe501c3b02019-04-17 21:54:27 +00002926 } else {
2927 DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
Vladimir Marko2c8c6b62016-12-01 17:42:00 +00002928 }
Brian Carlstromaded5f72011-10-07 17:15:04 -07002929 }
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00002930
2931 if (self->IsExceptionPending()) {
2932 // If the ClassLoader threw or array class allocation failed, pass that exception up.
2933 // However, to comply with the RI behavior, first check if another thread succeeded.
2934 result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
2935 if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
2936 self->ClearException();
2937 return EnsureResolved(self, descriptor, result_ptr);
2938 }
2939 return nullptr;
2940 }
2941
2942 // Try to insert the class to the class table, checking for mismatch.
2943 ObjPtr<mirror::Class> old;
2944 {
2945 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2946 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
2947 old = class_table->Lookup(descriptor, hash);
2948 if (old == nullptr) {
2949 old = result_ptr; // For the comparison below, after releasing the lock.
2950 if (descriptor_equals) {
Vladimir Markobcf17522018-06-01 13:14:32 +01002951 class_table->InsertWithHash(result_ptr, hash);
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07002952 WriteBarrier::ForEveryFieldWrite(class_loader.Get());
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00002953 } // else throw below, after releasing the lock.
2954 }
2955 }
2956 if (UNLIKELY(old != result_ptr)) {
2957 // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
2958 // capable class loaders. (All class loaders are considered parallel capable on Android.)
Vladimir Markodfc0de72019-04-01 10:57:55 +01002959 ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00002960 const char* loader_class_name =
2961 loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
2962 LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
2963 << " is not well-behaved; it returned a different Class for racing loadClass(\""
2964 << DescriptorToDot(descriptor) << "\").";
2965 return EnsureResolved(self, descriptor, old);
2966 }
2967 if (UNLIKELY(!descriptor_equals)) {
2968 std::string result_storage;
2969 const char* result_name = result_ptr->GetDescriptor(&result_storage);
2970 std::string loader_storage;
2971 const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
2972 ThrowNoClassDefFoundError(
2973 "Initiating class loader of type %s returned class %s instead of %s.",
2974 DescriptorToDot(loader_class_name).c_str(),
2975 DescriptorToDot(result_name).c_str(),
2976 DescriptorToDot(descriptor).c_str());
2977 return nullptr;
2978 }
Vladimir Markobcf17522018-06-01 13:14:32 +01002979 // Success.
2980 return result_ptr;
Brian Carlstromaded5f72011-10-07 17:15:04 -07002981}
2982
Alex Light270db1c2019-12-03 12:20:01 +00002983// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
2984// define-class, and track how many recursive DefineClasses we are in, in order to allow for
2985// doing things like pausing class definition.
2986struct ScopedDefiningClass {
2987 public:
2988 explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
2989 : self_(self), returned_(false) {
2990 Locks::mutator_lock_->AssertSharedHeld(self_);
2991 Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
2992 self_->IncrDefineClassCount();
2993 }
2994 ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
2995 Locks::mutator_lock_->AssertSharedHeld(self_);
2996 CHECK(returned_);
2997 }
2998
2999 ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
3000 REQUIRES_SHARED(Locks::mutator_lock_) {
3001 CHECK(!returned_);
3002 self_->DecrDefineClassCount();
3003 Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
3004 Thread::PoisonObjectPointersIfDebug();
3005 returned_ = true;
3006 return h_klass.Get();
3007 }
3008
3009 ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
3010 REQUIRES_SHARED(Locks::mutator_lock_) {
3011 StackHandleScope<1> hs(self_);
3012 Handle<mirror::Class> h_klass(hs.NewHandle(klass));
3013 return Finish(h_klass);
3014 }
3015
3016 ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
3017 REQUIRES_SHARED(Locks::mutator_lock_) {
3018 ScopedNullHandle<mirror::Class> snh;
3019 return Finish(snh);
3020 }
3021
3022 private:
3023 Thread* self_;
3024 bool returned_;
3025};
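// Usage sketch (hypothetical caller; DefineClass below is the real user of this helper): every
// return path must funnel through Finish() so the callback/counter bookkeeping stays balanced.
//
//   ObjPtr<mirror::Class> DefineSomething(Thread* self, Handle<mirror::Class> h_klass)
//       REQUIRES_SHARED(Locks::mutator_lock_) {
//     ScopedDefiningClass sdc(self);    // BeginDefineClass() + IncrDefineClassCount().
//     if (h_klass == nullptr) {
//       return sdc.Finish(nullptr);     // Failure path still calls EndDefineClass().
//     }
//     return sdc.Finish(h_klass);       // Success path; returns h_klass.Get().
//   }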
3026
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01003027ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
3028 const char* descriptor,
3029 size_t hash,
3030 Handle<mirror::ClassLoader> class_loader,
3031 const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003032 const dex::ClassDef& dex_class_def) {
Alex Light270db1c2019-12-03 12:20:01 +00003033 ScopedDefiningClass sdc(self);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003034 StackHandleScope<3> hs(self);
Eric Holk74584e62021-02-18 14:39:17 -08003035 metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003036 auto klass = hs.NewHandle<mirror::Class>(nullptr);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003037
Brian Carlstromaded5f72011-10-07 17:15:04 -07003038 // Load the class from the dex file.
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003039 if (UNLIKELY(!init_done_)) {
Brian Carlstromaded5f72011-10-07 17:15:04 -07003040 // finish up init of hand crafted class_roots_
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003041 if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003042 klass.Assign(GetClassRoot<mirror::Object>(this));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003043 } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003044 klass.Assign(GetClassRoot<mirror::Class>(this));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003045 } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003046 klass.Assign(GetClassRoot<mirror::String>(this));
Fred Shih4ee7a662014-07-11 09:59:27 -07003047 } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003048 klass.Assign(GetClassRoot<mirror::Reference>(this));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07003049 } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003050 klass.Assign(GetClassRoot<mirror::DexCache>(this));
Alex Lightd6251582016-10-31 11:12:30 -07003051 } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003052 klass.Assign(GetClassRoot<mirror::ClassExt>(this));
Brian Carlstromaded5f72011-10-07 17:15:04 -07003053 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003054 }
3055
Vladimir Markob9c29f62019-03-20 14:22:51 +00003056 // For AOT-compilation of an app, we may use a shortened boot class path that excludes
3057 // some runtime modules. Prevent definition of classes in app class loader that could clash
3058 // with these modules as these classes could be resolved differently during execution.
3059 if (class_loader != nullptr &&
3060 Runtime::Current()->IsAotCompiler() &&
Vladimir Markod1f73512020-04-02 10:50:35 +01003061 IsUpdatableBootClassPathDescriptor(descriptor)) {
Vladimir Markob9c29f62019-03-20 14:22:51 +00003062 ObjPtr<mirror::Throwable> pre_allocated =
3063 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3064 self->SetException(pre_allocated);
Alex Light270db1c2019-12-03 12:20:01 +00003065 return sdc.Finish(nullptr);
Vladimir Markob9c29f62019-03-20 14:22:51 +00003066 }
3067
Calin Juravle33787682019-07-26 14:27:18 -07003068 // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
3069 // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
3070 // public class path then we prevent the definition of the class.
3071 //
3072 // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
3073 // classpath is not checked.
3074 if (class_loader == nullptr &&
3075 Runtime::Current()->IsAotCompiler() &&
3076 DenyAccessBasedOnPublicSdk(descriptor)) {
3077 ObjPtr<mirror::Throwable> pre_allocated =
3078 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3079 self->SetException(pre_allocated);
3080 return sdc.Finish(nullptr);
3081 }
3082
Alex Lighte9f61032018-09-24 16:04:51 -07003083 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
3084 // code to be executed. We put it up here so we can avoid all the allocations associated with
3085 // creating the class. This can happen with (eg) jit threads.
3086 if (!self->CanLoadClasses()) {
3087 // Make sure we don't try to load anything, potentially causing an infinite loop.
3088 ObjPtr<mirror::Throwable> pre_allocated =
3089 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3090 self->SetException(pre_allocated);
Alex Light270db1c2019-12-03 12:20:01 +00003091 return sdc.Finish(nullptr);
Alex Lighte9f61032018-09-24 16:04:51 -07003092 }
3093
Andreas Gampefa4333d2017-02-14 11:10:34 -08003094 if (klass == nullptr) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003095 // Allocate a class with the status of not ready.
3096 // Interface object should get the right size here. Regular class will
3097 // figure out the right size later and be replaced with one of the right
3098 // size when the class becomes resolved.
Chang Xing0c2c2222017-08-04 14:36:17 -07003099 if (CanAllocClass()) {
3100 klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
3101 } else {
Alex Light270db1c2019-12-03 12:20:01 +00003102 return sdc.Finish(nullptr);
Chang Xing0c2c2222017-08-04 14:36:17 -07003103 }
Brian Carlstromaded5f72011-10-07 17:15:04 -07003104 }
Andreas Gampefa4333d2017-02-14 11:10:34 -08003105 if (UNLIKELY(klass == nullptr)) {
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003106 self->AssertPendingOOMException();
Alex Light270db1c2019-12-03 12:20:01 +00003107 return sdc.Finish(nullptr);
Ian Rogersa436fde2013-08-27 23:34:06 -07003108 }
Alex Lightb0f11922017-01-23 14:25:17 -08003109 // Get the real dex file. This will return the input if there aren't any callbacks or they do
3110 // nothing.
3111 DexFile const* new_dex_file = nullptr;
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003112 dex::ClassDef const* new_class_def = nullptr;
Alex Lightb0f11922017-01-23 14:25:17 -08003113 // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
3114 // will only be called once.
3115 Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
3116 klass,
3117 class_loader,
3118 dex_file,
3119 dex_class_def,
3120 &new_dex_file,
3121 &new_class_def);
Alex Light440b5d92017-01-24 15:32:25 -08003122 // Check to see if an exception happened during runtime callbacks. Return if so.
3123 if (self->IsExceptionPending()) {
Alex Light270db1c2019-12-03 12:20:01 +00003124 return sdc.Finish(nullptr);
Alex Light440b5d92017-01-24 15:32:25 -08003125 }
Alex Lightb0f11922017-01-23 14:25:17 -08003126 ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003127 if (dex_cache == nullptr) {
Vladimir Markocd556b02017-02-03 11:47:34 +00003128 self->AssertPendingException();
Alex Light270db1c2019-12-03 12:20:01 +00003129 return sdc.Finish(nullptr);
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003130 }
3131 klass->SetDexCache(dex_cache);
Alex Lightb0f11922017-01-23 14:25:17 -08003132 SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());
Mathieu Chartierc7853442015-03-27 14:35:38 -07003133
Jeff Hao848f70a2014-01-15 13:49:50 -08003134 // Mark the string class by setting its access flag.
3135 if (UNLIKELY(!init_done_)) {
3136 if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3137 klass->SetStringClass();
3138 }
3139 }
3140
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07003141 ObjectLock<mirror::Class> lock(self, klass);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003142 klass->SetClinitThreadId(self->GetTid());
Mathieu Chartier1e4841e2016-12-15 14:21:04 -08003143 // Make sure we have a valid empty iftable even if there are errors.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003144 klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003145
Mathieu Chartier590fee92013-09-13 13:46:47 -07003146 // Add the newly loaded class to the loaded classes table.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003147 ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
Ian Rogersc114b5f2014-07-21 08:55:01 -07003148 if (existing != nullptr) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07003149 // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
3150 // this thread to block.
Alex Light270db1c2019-12-03 12:20:01 +00003151 return sdc.Finish(EnsureResolved(self, descriptor, existing));
Brian Carlstromaded5f72011-10-07 17:15:04 -07003152 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003153
Mathieu Chartierc7853442015-03-27 14:35:38 -07003154 // Load the fields and other things after we are inserted in the table. This is so that we don't
3155  // end up allocating unfree-able linear alloc resources and then lose the race. The
3156 // other reason is that the field roots are only visited from the class table. So we need to be
3157 // inserted before we allocate / fill in these fields.
Alex Lightb0f11922017-01-23 14:25:17 -08003158 LoadClass(self, *new_dex_file, *new_class_def, klass);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003159 if (self->IsExceptionPending()) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08003160 VLOG(class_linker) << self->GetException()->Dump();
Mathieu Chartierc7853442015-03-27 14:35:38 -07003161    // An exception occurred during load; set status to erroneous while holding klass' lock in case
3162 // notification is necessary.
3163 if (!klass->IsErroneous()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00003164 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003165 }
Alex Light270db1c2019-12-03 12:20:01 +00003166 return sdc.Finish(nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003167 }
3168
Brian Carlstromaded5f72011-10-07 17:15:04 -07003169 // Finish loading (if necessary) by finding parents
3170 CHECK(!klass->IsLoaded());
Alex Lightb0f11922017-01-23 14:25:17 -08003171 if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
Brian Carlstromaded5f72011-10-07 17:15:04 -07003172 // Loading failed.
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003173 if (!klass->IsErroneous()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00003174 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003175 }
Alex Light270db1c2019-12-03 12:20:01 +00003176 return sdc.Finish(nullptr);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003177 }
3178 CHECK(klass->IsLoaded());
Andreas Gampe0f01b582017-01-18 15:22:37 -08003179
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07003180 // At this point the class is loaded. Publish a ClassLoad event.
Andreas Gampe0f01b582017-01-18 15:22:37 -08003181 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
Andreas Gampeac30fa22017-01-18 21:02:36 -08003182 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);
Andreas Gampe0f01b582017-01-18 15:22:37 -08003183
Brian Carlstromaded5f72011-10-07 17:15:04 -07003184 // Link the class (if necessary)
3185 CHECK(!klass->IsResolved());
Mathieu Chartier590fee92013-09-13 13:46:47 -07003186 // TODO: Use fast jobjects?
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003187 auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003188
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07003189 MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
Igor Murashkinb1d8c312015-08-04 11:18:43 -07003190 if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
Brian Carlstromaded5f72011-10-07 17:15:04 -07003191 // Linking failed.
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003192 if (!klass->IsErroneous()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00003193 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Ian Rogersecd4d9a2014-07-22 00:59:52 -07003194 }
Alex Light270db1c2019-12-03 12:20:01 +00003195 return sdc.Finish(nullptr);
Brian Carlstromaded5f72011-10-07 17:15:04 -07003196 }
Mathieu Chartier524507a2014-08-27 15:28:28 -07003197 self->AssertNoPendingException();
Andreas Gampefa4333d2017-02-14 11:10:34 -08003198 CHECK(h_new_class != nullptr) << descriptor;
Vladimir Marko72ab6842017-01-20 19:32:50 +00003199 CHECK(h_new_class->IsResolved() && !h_new_class->IsErroneousResolved()) << descriptor;
Elliott Hughes4740cdf2011-12-07 14:07:12 -08003200
Sebastien Hertza8a697f2015-01-15 12:28:47 +01003201 // Instrumentation may have updated entrypoints for all methods of all
3202 // classes. However it could not update methods of this class while we
3203 // were loading it. Now the class is resolved, we can update entrypoints
3204 // as required by instrumentation.
3205 if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
3206 // We must be in the kRunnable state to prevent instrumentation from
3207 // suspending all threads to update entrypoints while we are doing it
3208 // for this class.
3209 DCHECK_EQ(self->GetState(), kRunnable);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07003210 Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
Sebastien Hertza8a697f2015-01-15 12:28:47 +01003211 }
3212
Elliott Hughes4740cdf2011-12-07 14:07:12 -08003213 /*
3214 * We send CLASS_PREPARE events to the debugger from here. The
3215 * definition of "preparation" is creating the static fields for a
3216 * class and initializing them to the standard default values, but not
3217 * executing any code (that comes later, during "initialization").
3218 *
3219 * We did the static preparation in LinkClass.
3220 *
3221 * The class has been prepared and resolved but possibly not yet verified
3222 * at this point.
3223 */
Andreas Gampeac30fa22017-01-18 21:02:36 -08003224 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);
Elliott Hughes4740cdf2011-12-07 14:07:12 -08003225
Tamas Berghammer160e6df2016-01-05 14:29:02 +00003226 // Notify native debugger of the new class and its layout.
3227 jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());
3228
Alex Light270db1c2019-12-03 12:20:01 +00003229 return sdc.Finish(h_new_class);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07003230}
3231
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003232uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003233 const dex::ClassDef& dex_class_def) {
Brian Carlstrom4873d462011-08-21 15:23:39 -07003234 size_t num_ref = 0;
Fred Shih37f05ef2014-07-16 18:38:08 -07003235 size_t num_8 = 0;
3236 size_t num_16 = 0;
Brian Carlstrom4873d462011-08-21 15:23:39 -07003237 size_t num_32 = 0;
3238 size_t num_64 = 0;
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003239 ClassAccessor accessor(dex_file, dex_class_def);
3240 // We allow duplicate definitions of the same field in a class_data_item
3241 // but ignore the repeated indexes here, b/21868015.
3242 uint32_t last_field_idx = dex::kDexNoIndex;
3243 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3244 uint32_t field_idx = field.GetIndex();
3245 // Ordering enforced by DexFileVerifier.
3246 DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3247 if (UNLIKELY(field_idx == last_field_idx)) {
3248 continue;
3249 }
3250 last_field_idx = field_idx;
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003251 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003252 const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3253 char c = descriptor[0];
3254 switch (c) {
3255 case 'L':
3256 case '[':
3257 num_ref++;
3258 break;
3259 case 'J':
3260 case 'D':
3261 num_64++;
3262 break;
3263 case 'I':
3264 case 'F':
3265 num_32++;
3266 break;
3267 case 'S':
3268 case 'C':
3269 num_16++;
3270 break;
3271 case 'B':
3272 case 'Z':
3273 num_8++;
3274 break;
3275 default:
3276 LOG(FATAL) << "Unknown descriptor: " << c;
3277 UNREACHABLE();
Brian Carlstrom4873d462011-08-21 15:23:39 -07003278 }
3279 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003280 return mirror::Class::ComputeClassSize(false,
3281 0,
3282 num_8,
3283 num_16,
3284 num_32,
3285 num_64,
3286 num_ref,
Mathieu Chartiere401d142015-04-22 13:56:20 -07003287 image_pointer_size_);
Brian Carlstrom4873d462011-08-21 15:23:39 -07003288}
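// Worked example (illustrative, assuming a class_def whose static fields have descriptors "Z",
// "I", "J" and "Ljava/lang/String;"): the loop above buckets them as num_8 = 1, num_32 = 1,
// num_64 = 1 and num_ref = 1, and the result is whatever mirror::Class::ComputeClassSize()
// returns for those counts with no embedded tables (first two arguments false and 0) at
// image_pointer_size_.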
3289
Alex Lightfc49fec2018-01-16 22:28:36 +00003290// Special case to get oat code without overwriting a trampoline.
3291const void* ClassLinker::GetQuickOatCodeFor(ArtMethod* method) {
David Sehr709b0702016-10-13 09:12:37 -07003292 CHECK(method->IsInvokable()) << method->PrettyMethod();
Nicolas Geoffraya7a47592015-11-24 09:17:30 +00003293 if (method->IsProxyMethod()) {
Ian Rogersef7d42f2014-01-06 12:55:46 -08003294 return GetQuickProxyInvokeHandler();
Jeff Hao8df6cea2013-07-29 13:54:48 -07003295 }
Nicolas Geoffray32384402019-07-17 20:06:44 +01003296 const void* code = method->GetOatMethodQuickCode(GetImagePointerSize());
Alex Lightfc49fec2018-01-16 22:28:36 +00003297 if (code != nullptr) {
3298 return code;
Mathieu Chartier2535abe2015-02-17 10:38:49 -08003299 }
Nicolas Geoffray32384402019-07-17 20:06:44 +01003300
3301 jit::Jit* jit = Runtime::Current()->GetJit();
3302 if (jit != nullptr) {
3303 code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
3304 if (code != nullptr) {
3305 return code;
3306 }
3307 }
3308
Alex Lightfc49fec2018-01-16 22:28:36 +00003309 if (method->IsNative()) {
3310 // No code and native? Use generic trampoline.
3311 return GetQuickGenericJniStub();
3312 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003313
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00003314 if (interpreter::CanRuntimeUseNterp() && CanMethodUseNterp(method)) {
Nicolas Geoffray00391822019-12-10 10:17:23 +00003315 return interpreter::GetNterpEntryPoint();
3316 }
3317
Alex Lightfc49fec2018-01-16 22:28:36 +00003318 return GetQuickToInterpreterBridge();
TDYa12785321912012-04-01 15:24:56 -07003319}
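// Summary of the lookup order implemented above (descriptive comment only): proxy invoke handler
// -> AOT (oat) code -> JIT-saved entry point for precompiled methods -> generic JNI stub for
// native methods without code -> nterp when the runtime and method allow it -> the
// quick-to-interpreter bridge as the final fallback.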
3320
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003321bool ClassLinker::ShouldUseInterpreterEntrypoint(ArtMethod* method, const void* quick_code) {
Alex Light2d441b12018-06-08 15:33:21 -07003322 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003323 if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
3324 return false;
3325 }
3326
Elliott Hughes956af0f2014-12-11 14:34:28 -08003327 if (quick_code == nullptr) {
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003328 return true;
3329 }
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003330
3331 Runtime* runtime = Runtime::Current();
3332 instrumentation::Instrumentation* instr = runtime->GetInstrumentation();
3333 if (instr->InterpretOnly()) {
3334 return true;
3335 }
3336
3337 if (runtime->GetClassLinker()->IsQuickToInterpreterBridge(quick_code)) {
3338 // Doing this check avoids doing compiled/interpreter transitions.
3339 return true;
3340 }
3341
Alex Lightfc588092020-01-23 15:39:08 -08003342 if (Thread::Current()->IsForceInterpreter()) {
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003343    // Force the use of the interpreter when it is required by the debugger.
3344 return true;
3345 }
3346
Alex Light8f34aba2017-10-09 13:46:32 -07003347 if (Thread::Current()->IsAsyncExceptionPending()) {
3348 // Force use of interpreter to handle async-exceptions
3349 return true;
3350 }
3351
Alex Light2d441b12018-06-08 15:33:21 -07003352 if (quick_code == GetQuickInstrumentationEntryPoint()) {
3353 const void* instr_target = instr->GetCodeForInvoke(method);
3354 DCHECK_NE(instr_target, GetQuickInstrumentationEntryPoint()) << method->PrettyMethod();
3355 return ShouldUseInterpreterEntrypoint(method, instr_target);
3356 }
3357
Nicolas Geoffray433b79a2017-01-30 20:54:45 +00003358 if (runtime->IsJavaDebuggable()) {
3359 // For simplicity, we ignore precompiled code and go to the interpreter
3360 // assuming we don't already have jitted code.
3361 // We could look at the oat file where `quick_code` is being defined,
3362 // and check whether it's been compiled debuggable, but we decided to
3363 // only rely on the JIT for debuggable apps.
Alex Light6b16d892016-11-11 11:21:04 -08003364 jit::Jit* jit = Runtime::Current()->GetJit();
3365 return (jit == nullptr) || !jit->GetCodeCache()->ContainsPc(quick_code);
3366 }
3367
Nicolas Geoffrayc9de61c2018-11-27 17:34:31 +00003368 if (runtime->IsNativeDebuggable()) {
Calin Juravlee5de54c2016-04-20 14:22:09 +01003369 DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
David Srbeckyf4480162016-03-16 00:06:24 +00003370 // If we are doing native debugging, ignore application's AOT code,
Nicolas Geoffray433b79a2017-01-30 20:54:45 +00003371 // since we want to JIT it (at first use) with extra stackmaps for native
3372 // debugging. We keep however all AOT code from the boot image,
3373    // debugging. However, we keep all AOT code from the boot image,
3374 // startup performance impact.
David Srbeckyf4480162016-03-16 00:06:24 +00003375 return !runtime->GetHeap()->IsInBootImageOatFile(quick_code);
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003376 }
3377
3378 return false;
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003379}
3380
Vladimir Marko86c87522020-05-11 16:55:55 +01003381void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
Alex Light2d441b12018-06-08 15:33:21 -07003382 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
Vladimir Markocce414f2019-10-07 08:51:33 +01003383 DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
Vladimir Marko86c87522020-05-11 16:55:55 +01003384 size_t num_direct_methods = klass->NumDirectMethods();
3385 if (num_direct_methods == 0) {
Ian Rogers1c829822013-09-30 18:18:50 -07003386 return; // No direct methods => no static methods.
Ian Rogers19846512012-02-24 11:42:47 -08003387 }
Vladimir Markocce414f2019-10-07 08:51:33 +01003388 if (UNLIKELY(klass->IsProxyClass())) {
3389 return;
3390 }
Vladimir Marko86c87522020-05-11 16:55:55 +01003391 PointerSize pointer_size = image_pointer_size_;
3392 if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
3393 klass->GetDirectMethods(pointer_size).end(),
3394 [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
3395 // Store registered @CriticalNative methods, if any, to JNI entrypoints.
3396 // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
3397 ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
3398 ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
3399 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
3400 auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
3401 while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
3402 lb->first->SetEntryPointFromJni(lb->second);
3403 lb = critical_native_code_with_clinit_check_.erase(lb);
3404 }
3405 }
Ian Rogers62d6c772013-02-27 08:32:07 -08003406 Runtime* runtime = Runtime::Current();
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07003407 if (!runtime->IsStarted()) {
Jeff Haodcdc85b2015-12-04 14:06:18 -08003408 if (runtime->IsAotCompiler() || runtime->GetHeap()->HasBootImageSpace()) {
Alex Light64ad14d2014-08-19 14:23:13 -07003409 return; // OAT file unavailable.
3410 }
Ian Rogers19846512012-02-24 11:42:47 -08003411 }
Alex Light64ad14d2014-08-19 14:23:13 -07003412
Mathieu Chartierf8322842014-05-16 10:59:25 -07003413 const DexFile& dex_file = klass->GetDexFile();
Ian Rogers97b52f82014-08-14 11:34:07 -07003414 bool has_oat_class;
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003415 OatFile::OatClass oat_class = OatFile::FindOatClass(dex_file,
3416 klass->GetDexClassDefIndex(),
3417 &has_oat_class);
Ian Rogers1c829822013-09-30 18:18:50 -07003418 // Link the code of methods skipped by LinkCode.
Vladimir Marko86c87522020-05-11 16:55:55 +01003419 for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
3420 ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003421 if (!method->IsStatic()) {
3422 // Only update static methods.
3423 continue;
Ian Rogers19846512012-02-24 11:42:47 -08003424 }
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003425 const void* quick_code = nullptr;
Nicolas Geoffray00391822019-12-10 10:17:23 +00003426
3427 // In order:
3428 // 1) Check if we have AOT Code.
3429 // 2) Check if we have JIT Code.
3430 // 3) Check if we can use Nterp.
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003431 if (has_oat_class) {
3432 OatFile::OatMethod oat_method = oat_class.GetOatMethod(method_index);
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003433 quick_code = oat_method.GetQuickCode();
3434 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003435
Vladimir Markocce414f2019-10-07 08:51:33 +01003436 jit::Jit* jit = runtime->GetJit();
Nicolas Geoffray32384402019-07-17 20:06:44 +01003437 if (quick_code == nullptr && jit != nullptr) {
3438 quick_code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
Nicolas Geoffray7989ac92019-04-10 12:42:30 +01003439 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003440
3441 if (quick_code == nullptr &&
3442 interpreter::CanRuntimeUseNterp() &&
Nicolas Geoffrayc39af942021-01-25 08:43:57 +00003443 CanMethodUseNterp(method)) {
Nicolas Geoffray00391822019-12-10 10:17:23 +00003444 quick_code = interpreter::GetNterpEntryPoint();
3445 }
3446
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003447 // Check whether the method is native, in which case it's generic JNI.
Ulya Trafimovich5439f052020-07-29 10:03:46 +01003448 if (quick_code == nullptr && method->IsNative()) {
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003449 quick_code = GetQuickGenericJniStub();
3450 } else if (ShouldUseInterpreterEntrypoint(method, quick_code)) {
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003451 // Use interpreter entry point.
Nicolas Geoffray00391822019-12-10 10:17:23 +00003452 if (IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode())) {
3453 // If we have the trampoline or the bridge already, no need to update.
3454        // This avoids dirtying boot image memory.
3455 continue;
3456 }
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003457 quick_code = GetQuickToInterpreterBridge();
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003458 }
Nicolas Geoffray00391822019-12-10 10:17:23 +00003459 CHECK(quick_code != nullptr);
Elliott Hughes956af0f2014-12-11 14:34:28 -08003460 runtime->GetInstrumentation()->UpdateMethodsCode(method, quick_code);
Ian Rogers19846512012-02-24 11:42:47 -08003461 }
Ian Rogers62d6c772013-02-27 08:32:07 -08003462 // Ignore virtual methods on the iterator.
Ian Rogers19846512012-02-24 11:42:47 -08003463}
3464
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003465// Does whatever is needed to make sure that the compiler will not generate a direct invoke to this
3466// method. Should only be called on non-invokable methods.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +00003467inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3468 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07003469 DCHECK(method != nullptr);
3470 DCHECK(!method->IsInvokable());
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003471 method->SetEntryPointFromQuickCompiledCodePtrSize(
3472 class_linker->GetQuickToInterpreterBridgeTrampoline(),
3473 class_linker->GetImagePointerSize());
Alex Light9139e002015-10-09 15:59:48 -07003474}
3475
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003476static void LinkCode(ClassLinker* class_linker,
3477 ArtMethod* method,
3478 const OatFile::OatClass* oat_class,
3479 uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light2d441b12018-06-08 15:33:21 -07003480 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003481 Runtime* const runtime = Runtime::Current();
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08003482 if (runtime->IsAotCompiler()) {
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003483 // The following code only applies to a non-compiler runtime.
3484 return;
3485 }
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003486
Ian Rogers62d6c772013-02-27 08:32:07 -08003487 // Method shouldn't have already been linked.
Ian Rogersef7d42f2014-01-06 12:55:46 -08003488 DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
Jeff Hao16743632013-05-08 10:59:04 -07003489
Alex Light9139e002015-10-09 15:59:48 -07003490 if (!method->IsInvokable()) {
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003491 EnsureThrowsInvocationError(class_linker, method);
Brian Carlstrom92827a52011-10-10 15:50:01 -07003492 return;
3493 }
Ian Rogers19846512012-02-24 11:42:47 -08003494
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003495 const void* quick_code = nullptr;
3496 if (oat_class != nullptr) {
3497 // Every kind of method should at least get an invoke stub from the oat_method.
3498 // non-abstract methods also get their code pointers.
3499 const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
3500 quick_code = oat_method.GetQuickCode();
3501 }
3502
3503 bool enter_interpreter = class_linker->ShouldUseInterpreterEntrypoint(method, quick_code);
3504
3505 // Note: this mimics the logic in image_writer.cc that installs the resolution
3506 // stub only if we have compiled code and the method needs a class initialization
3507 // check.
Ulya Trafimovich5439f052020-07-29 10:03:46 +01003508 if (quick_code == nullptr) {
Nicolas Geoffray4ef36492021-06-16 17:26:51 +01003509 if (method->IsNative()) {
3510 method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
Nicolas Geoffray4ef36492021-06-16 17:26:51 +01003511 } else {
Nicolas Geoffray6cd7ab32021-07-07 09:35:50 +01003512 // Note we cannot use the nterp entrypoint because we do not know if the
3513 // method will need the slow interpreter for lock verification. This will
3514 // be updated in EnsureSkipAccessChecksMethods.
Nicolas Geoffray4ef36492021-06-16 17:26:51 +01003515 method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
3516 }
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003517 } else if (enter_interpreter) {
3518 method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
Vladimir Marko5115a4d2019-10-17 14:56:47 +01003519 } else if (NeedsClinitCheckBeforeCall(method)) {
3520 DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized()); // Actually ClassStatus::Idx.
3521 // If we do have code but the method needs a class initialization check before calling
3522 // that code, install the resolution stub that will perform the check.
Sebastien Hertz7d658cf2013-07-09 10:56:11 +02003523 // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
3524 // after initializing class (see ClassLinker::InitializeClass method).
Ian Rogers6f3dbba2014-10-14 17:41:57 -07003525 method->SetEntryPointFromQuickCompiledCode(GetQuickResolutionStub());
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003526 } else {
3527 method->SetEntryPointFromQuickCompiledCode(quick_code);
Ian Rogers0d6de042012-02-29 08:50:26 -08003528 }
jeffhao26c0a1a2012-01-17 16:28:33 -08003529
Ian Rogers62d6c772013-02-27 08:32:07 -08003530 if (method->IsNative()) {
Vladimir Marko86c87522020-05-11 16:55:55 +01003531 // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
3532 // as the extra processing for @CriticalNative is not needed yet.
3533 method->SetEntryPointFromJni(
3534 method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
Andreas Gampe90546832014-03-12 18:07:19 -07003535
Tamas Berghammerdd5e5e92016-02-12 16:29:00 +00003536 if (enter_interpreter || quick_code == nullptr) {
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003537      // We have a native method here without code. It should then have the generic JNI
3538      // trampoline as its entrypoint.
Ian Rogers6f3dbba2014-10-14 17:41:57 -07003539 // TODO: this doesn't handle all the cases where trampolines may be installed.
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003540 DCHECK(class_linker->IsQuickGenericJniStub(method->GetEntryPointFromQuickCompiledCode()));
Andreas Gampe90546832014-03-12 18:07:19 -07003541 }
Brian Carlstrom92827a52011-10-10 15:50:01 -07003542 }
3543}
3544
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003545void ClassLinker::SetupClass(const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003546 const dex::ClassDef& dex_class_def,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003547 Handle<mirror::Class> klass,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003548 ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08003549 CHECK(klass != nullptr);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003550 CHECK(klass->GetDexCache() != nullptr);
Vladimir Marko2c64a832018-01-04 11:31:56 +00003551 CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
Brian Carlstromf615a612011-07-23 12:50:34 -07003552 const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003553 CHECK(descriptor != nullptr);
Brian Carlstrom934486c2011-07-12 23:42:50 -07003554
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003555 klass->SetClass(GetClassRoot<mirror::Class>(this));
Andreas Gampe51829322014-08-25 15:05:04 -07003556 uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
Brian Carlstrom8e3fb142013-10-09 21:00:27 -07003557 CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
Vladimir Markob68bb7a2020-03-17 10:55:25 +00003558 klass->SetAccessFlagsDuringLinking(access_flags);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07003559 klass->SetClassLoader(class_loader);
Ian Rogersc2b44472011-12-14 21:17:17 -08003560 DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
Vladimir Marko2c64a832018-01-04 11:31:56 +00003561 mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
Brian Carlstrom934486c2011-07-12 23:42:50 -07003562
Ian Rogers8b2c0b92013-09-19 02:56:49 -07003563 klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003564 klass->SetDexTypeIndex(dex_class_def.class_idx_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003565}
Brian Carlstrom934486c2011-07-12 23:42:50 -07003566
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003567LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3568 LinearAlloc* allocator,
3569 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003570 if (length == 0) {
3571 return nullptr;
3572 }
Vladimir Markocf36d492015-08-12 19:27:26 +01003573 // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3574 static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3575 size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003576 void* array_storage = allocator->Alloc(self, storage_size);
Vladimir Markocf36d492015-08-12 19:27:26 +01003577 auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003578 CHECK(ret != nullptr);
3579 std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3580 return ret;
Mathieu Chartierc7853442015-03-27 14:35:38 -07003581}
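// Illustrative sketch (assumed caller, matching LoadClass below): the returned array is a single
// length-prefixed allocation from the given LinearAlloc, so fields live in place and are reached
// by index instead of through per-field allocations (the function returns nullptr for length 0):
//
//   LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
//   for (size_t i = 0; i < num_fields; ++i) {
//     ArtField& field = sfields->At(i);  // Already default-constructed by AllocArtFieldArray.
//     // ... fill in the field ...
//   }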
3582
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003583LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3584 LinearAlloc* allocator,
3585 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003586 if (length == 0) {
3587 return nullptr;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003588 }
Vladimir Marko14632852015-08-17 12:07:23 +01003589 const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3590 const size_t method_size = ArtMethod::Size(image_pointer_size_);
Vladimir Markocf36d492015-08-12 19:27:26 +01003591 const size_t storage_size =
3592 LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003593 void* array_storage = allocator->Alloc(self, storage_size);
Vladimir Markocf36d492015-08-12 19:27:26 +01003594 auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003595 CHECK(ret != nullptr);
3596 for (size_t i = 0; i < length; ++i) {
Vladimir Markocf36d492015-08-12 19:27:26 +01003597 new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003598 }
3599 return ret;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003600}
3601
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003602LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003603 if (class_loader == nullptr) {
3604 return Runtime::Current()->GetLinearAlloc();
3605 }
3606 LinearAlloc* allocator = class_loader->GetAllocator();
3607 DCHECK(allocator != nullptr);
3608 return allocator;
3609}
3610
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003611LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003612 if (class_loader == nullptr) {
3613 return Runtime::Current()->GetLinearAlloc();
3614 }
3615 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3616 LinearAlloc* allocator = class_loader->GetAllocator();
3617 if (allocator == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08003618 RegisterClassLoader(class_loader);
3619 allocator = class_loader->GetAllocator();
3620 CHECK(allocator != nullptr);
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003621 }
3622 return allocator;
3623}
3624
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003625void ClassLinker::LoadClass(Thread* self,
3626 const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003627 const dex::ClassDef& dex_class_def,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003628 Handle<mirror::Class> klass) {
David Brazdil20c765f2018-10-27 21:45:15 +00003629 ClassAccessor accessor(dex_file,
3630 dex_class_def,
3631 /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003632 if (!accessor.HasClassData()) {
3633 return;
3634 }
3635 Runtime* const runtime = Runtime::Current();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003636 {
3637 // Note: We cannot have thread suspension until the field and method arrays are setup or else
3638 // Class::VisitFieldRoots may miss some fields or methods.
Mathieu Chartier268764d2016-09-13 12:09:38 -07003639 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003640 // Load static fields.
Vladimir Marko23682bf2015-06-24 14:28:03 +01003641 // We allow duplicate definitions of the same field in a class_data_item
3642 // but ignore the repeated indexes here, b/21868015.
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003643 LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003644 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
3645 allocator,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003646 accessor.NumStaticFields());
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003647 LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
3648 allocator,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003649 accessor.NumInstanceFields());
3650 size_t num_sfields = 0u;
Vladimir Marko23682bf2015-06-24 14:28:03 +01003651 size_t num_ifields = 0u;
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003652 uint32_t last_static_field_idx = 0u;
3653 uint32_t last_instance_field_idx = 0u;
Orion Hodsonc069a302017-01-18 09:23:12 +00003654
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003655 // Methods
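    // Look up the OatClass only in a started, non-AOT runtime; otherwise LinkCode() below runs
    // without oat data.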
3656 bool has_oat_class = false;
3657 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
3658 ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
3659 : OatFile::OatClass::Invalid();
3660 const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
3661 klass->SetMethodsPtr(
3662 AllocArtMethodArray(self, allocator, accessor.NumMethods()),
3663 accessor.NumDirectMethods(),
3664 accessor.NumVirtualMethods());
3665 size_t class_def_method_index = 0;
3666 uint32_t last_dex_method_index = dex::kDexNoIndex;
3667 size_t last_class_def_method_index = 0;
3668
3669 // Use the visitor since the range-based loops are a bit slower due to seeking: seeking to the
3670 // methods requires decoding all of the fields first.
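    // The four callbacks below handle static fields, instance fields, direct methods and
    // virtual methods, in that order.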
3671 accessor.VisitFieldsAndMethods([&](
3672 const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3673 uint32_t field_idx = field.GetIndex();
3674 DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier.
3675 if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
3676 LoadField(field, klass, &sfields->At(num_sfields));
3677 ++num_sfields;
3678 last_static_field_idx = field_idx;
3679 }
3680 }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3681 uint32_t field_idx = field.GetIndex();
3682 DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier.
3683 if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
3684 LoadField(field, klass, &ifields->At(num_ifields));
3685 ++num_ifields;
3686 last_instance_field_idx = field_idx;
3687 }
3688 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3689 ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
3690 image_pointer_size_);
3691 LoadMethod(dex_file, method, klass, art_method);
3692 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3693 uint32_t it_method_index = method.GetIndex();
3694 if (last_dex_method_index == it_method_index) {
3695 // duplicate case
3696 art_method->SetMethodIndex(last_class_def_method_index);
3697 } else {
3698 art_method->SetMethodIndex(class_def_method_index);
3699 last_dex_method_index = it_method_index;
3700 last_class_def_method_index = class_def_method_index;
3701 }
3702 ++class_def_method_index;
3703 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3704 ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
3705 class_def_method_index - accessor.NumDirectMethods(),
3706 image_pointer_size_);
3707 LoadMethod(dex_file, method, klass, art_method);
3708 LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3709 ++class_def_method_index;
3710 });
3711
3712 if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
David Sehr709b0702016-10-13 09:12:37 -07003713 LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003714 << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
3715 << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
3716 << ")";
Vladimir Marko81819db2015-11-05 15:30:12 +00003717 // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
3718 if (sfields != nullptr) {
3719 sfields->SetSize(num_sfields);
3720 }
3721 if (ifields != nullptr) {
3722 ifields->SetSize(num_ifields);
3723 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07003724 }
Vladimir Marko81819db2015-11-05 15:30:12 +00003725 // Set the field arrays.
3726 klass->SetSFieldsPtr(sfields);
3727 DCHECK_EQ(klass->NumStaticFields(), num_sfields);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003728 klass->SetIFieldsPtr(ifields);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003729 DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
Ian Rogers0571d352011-11-03 19:51:38 -07003730 }
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07003731 // Ensure that the card is marked so that remembered sets pick up native roots.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07003732 WriteBarrier::ForEveryFieldWrite(klass.Get());
Mathieu Chartierf3f2a7a2015-04-14 15:43:10 -07003733 self->AllowThreadSuspension();
Brian Carlstrom934486c2011-07-12 23:42:50 -07003734}
3735
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003736void ClassLinker::LoadField(const ClassAccessor::Field& field,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003737 Handle<mirror::Class> klass,
Mathieu Chartierc7853442015-03-27 14:35:38 -07003738 ArtField* dst) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003739 const uint32_t field_idx = field.GetIndex();
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003740 dst->SetDexFieldIndex(field_idx);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003741 dst->SetDeclaringClass(klass.Get());
David Brazdilf6a8a552018-01-15 18:10:50 +00003742
David Brazdil85865692018-10-30 17:26:20 +00003743 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3744 dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
Brian Carlstrom934486c2011-07-12 23:42:50 -07003745}
3746
Mathieu Chartier268764d2016-09-13 12:09:38 -07003747void ClassLinker::LoadMethod(const DexFile& dex_file,
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003748 const ClassAccessor::Method& method,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003749 Handle<mirror::Class> klass,
3750 ArtMethod* dst) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003751 const uint32_t dex_method_idx = method.GetIndex();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003752 const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
Ian Rogersdfb325e2013-10-30 01:00:44 -07003753 const char* method_name = dex_file.StringDataByIdx(method_id.name_idx_);
Mathieu Chartier66f19252012-09-18 08:57:04 -07003754
Mathieu Chartier268764d2016-09-13 12:09:38 -07003755 ScopedAssertNoThreadSuspension ants("LoadMethod");
Mathieu Chartier66f19252012-09-18 08:57:04 -07003756 dst->SetDexMethodIndex(dex_method_idx);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003757 dst->SetDeclaringClass(klass.Get());
Brian Carlstrom934486c2011-07-12 23:42:50 -07003758
David Brazdil85865692018-10-30 17:26:20 +00003759 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3760 uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
David Brazdilf6a8a552018-01-15 18:10:50 +00003761
Ian Rogersdfb325e2013-10-30 01:00:44 -07003762 if (UNLIKELY(strcmp("finalize", method_name) == 0)) {
Ian Rogers241b5de2013-10-09 17:58:57 -07003763 // Set finalizable flag on declaring class.
Ian Rogersdfb325e2013-10-30 01:00:44 -07003764 if (strcmp("V", dex_file.GetShorty(method_id.proto_idx_)) == 0) {
3765 // Void return type.
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003766 if (klass->GetClassLoader() != nullptr) { // All non-boot finalizer methods are flagged.
Ian Rogersdfb325e2013-10-30 01:00:44 -07003767 klass->SetFinalizable();
3768 } else {
Ian Rogers1ff3c982014-08-12 02:30:58 -07003769 std::string temp;
3770 const char* klass_descriptor = klass->GetDescriptor(&temp);
Ian Rogersdfb325e2013-10-30 01:00:44 -07003771 // The Enum class declares a "final" finalize() method to prevent subclasses from
3772 // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
3773 // subclasses, so we exclude it here.
3774 // We also want to avoid setting the flag on Object, where we know that finalize() is
3775 // empty.
Ian Rogers1ff3c982014-08-12 02:30:58 -07003776 if (strcmp(klass_descriptor, "Ljava/lang/Object;") != 0 &&
3777 strcmp(klass_descriptor, "Ljava/lang/Enum;") != 0) {
Ian Rogers241b5de2013-10-09 17:58:57 -07003778 klass->SetFinalizable();
Ian Rogers241b5de2013-10-09 17:58:57 -07003779 }
3780 }
3781 }
3782 } else if (method_name[0] == '<') {
3783 // Fix broken access flags for initializers. Bug 11157540.
Ian Rogersdfb325e2013-10-30 01:00:44 -07003784 bool is_init = (strcmp("<init>", method_name) == 0);
3785 bool is_clinit = !is_init && (strcmp("<clinit>", method_name) == 0);
Ian Rogers241b5de2013-10-09 17:58:57 -07003786 if (UNLIKELY(!is_init && !is_clinit)) {
3787 LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
3788 } else {
3789 if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
3790 LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
David Sehr709b0702016-10-13 09:12:37 -07003791 << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
Ian Rogers241b5de2013-10-09 17:58:57 -07003792 access_flags |= kAccConstructor;
3793 }
3794 }
3795 }
Vladimir Markob0a6aee2017-10-27 10:34:04 +01003796 if (UNLIKELY((access_flags & kAccNative) != 0u)) {
3797 // Check if the native method is annotated with @FastNative or @CriticalNative.
3798 access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
3799 dex_file, dst->GetClassDef(), dex_method_idx);
3800 }
Ian Rogers241b5de2013-10-09 17:58:57 -07003801 dst->SetAccessFlags(access_flags);
David Srbeckye36e7f22018-11-14 14:21:23 +00003802 // Must be done after SetAccessFlags since IsAbstract depends on it.
3803 if (klass->IsInterface() && dst->IsAbstract()) {
3804 dst->CalculateAndSetImtIndex();
3805 }
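  // When compiling AOT, store the code item offset in the data pointer; at runtime, store the
  // resolved code item pointer instead.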
Nicolas Geoffray47171752020-08-31 15:03:20 +01003806 if (dst->HasCodeItem()) {
3807 DCHECK_NE(method.GetCodeItemOffset(), 0u);
3808 if (Runtime::Current()->IsAotCompiler()) {
3809 dst->SetDataPtrSize(reinterpret_cast32<void*>(method.GetCodeItemOffset()), image_pointer_size_);
3810 } else {
Nicolas Geoffraye1d2dce2020-09-21 10:06:31 +01003811 dst->SetCodeItem(dst->GetDexFile()->GetCodeItem(method.GetCodeItemOffset()));
Nicolas Geoffray47171752020-08-31 15:03:20 +01003812 }
3813 } else {
3814 dst->SetDataPtrSize(nullptr, image_pointer_size_);
3815 DCHECK_EQ(method.GetCodeItemOffset(), 0u);
3816 }
Nicolas Geoffray43c9cd72021-03-10 15:09:19 +00003817
3818 // Set optimization flags related to the shorty.
3819 const char* shorty = dst->GetShorty();
3820 bool all_parameters_are_reference = true;
3821 bool all_parameters_are_reference_or_int = true;
3822 bool return_type_is_fp = (shorty[0] == 'F' || shorty[0] == 'D');
3823
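  // shorty[0] is the return type; parameter types start at index 1.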
3824 for (size_t i = 1, e = strlen(shorty); i < e; ++i) {
3825 if (shorty[i] != 'L') {
3826 all_parameters_are_reference = false;
3827 if (shorty[i] == 'F' || shorty[i] == 'D' || shorty[i] == 'J') {
3828 all_parameters_are_reference_or_int = false;
3829 break;
3830 }
3831 }
3832 }
3833
3834 if (!dst->IsNative() && all_parameters_are_reference) {
3835 dst->SetNterpEntryPointFastPathFlag();
3836 }
3837
3838 if (!return_type_is_fp && all_parameters_are_reference_or_int) {
3839 dst->SetNterpInvokeFastPathFlag();
3840 }
Brian Carlstrom934486c2011-07-12 23:42:50 -07003841}
3842
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003843void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
Orion Hodsonb9b7d912021-02-24 09:24:47 +00003844 ObjPtr<mirror::DexCache> dex_cache = AllocAndInitializeDexCache(
3845 self,
3846 *dex_file,
3847 Runtime::Current()->GetLinearAlloc());
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003848 CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
Brian Carlstrom40381fb2011-10-19 14:13:40 -07003849 AppendToBootClassPath(dex_file, dex_cache);
Brian Carlstroma663ea52011-08-19 23:33:41 -07003850}
3851
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003852void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
Vladimir Markocd556b02017-02-03 11:47:34 +00003853 ObjPtr<mirror::DexCache> dex_cache) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003854 CHECK(dex_file != nullptr);
3855 CHECK(dex_cache != nullptr) << dex_file->GetLocation();
3856 boot_class_path_.push_back(dex_file);
Andreas Gampebe7af222017-07-25 09:57:28 -07003857 WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003858 RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07003859}
3860
Mathieu Chartierc528dba2013-11-26 12:00:11 -08003861void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
Vladimir Markocd556b02017-02-03 11:47:34 +00003862 ObjPtr<mirror::DexCache> dex_cache,
3863 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003864 Thread* const self = Thread::Current();
Andreas Gampecc1b5352016-12-01 16:58:38 -08003865 Locks::dex_lock_->AssertExclusiveHeld(self);
Vladimir Markocd556b02017-02-03 11:47:34 +00003866 CHECK(dex_cache != nullptr) << dex_file.GetLocation();
David Srbecky86d6cd52020-12-02 18:13:10 +00003867 CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08003868 // For app images, the dex cache location may be a suffix of the dex file location since the
3869 // dex file location is an absolute path.
Mathieu Chartier76172162016-01-26 14:54:06 -08003870 const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
3871 const size_t dex_cache_length = dex_cache_location.length();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08003872 CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
3873 std::string dex_file_location = dex_file.GetLocation();
Nicolas Geoffraye3e0f702019-03-12 07:02:02 +00003874 // The following path checks don't work on preopt when using boot dex files, where the dex
3875 // cache location is the one on device, and the dex_file's location is the one on host.
3876 if (!(Runtime::Current()->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
3877 CHECK_GE(dex_file_location.length(), dex_cache_length)
3878 << dex_cache_location << " " << dex_file.GetLocation();
3879 const std::string dex_file_suffix = dex_file_location.substr(
3880 dex_file_location.length() - dex_cache_length,
3881 dex_cache_length);
3882 // Example dex_cache location is SettingsProvider.apk and
3883 // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
3884 CHECK_EQ(dex_cache_location, dex_file_suffix);
3885 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01003886 const OatFile* oat_file =
3887 (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
Vladimir Markob066d432018-01-03 13:14:37 +00003888 // Clean up pass to remove null dex caches; null dex caches can occur due to class unloading
3889 // and we are lazily removing null entries. Also check if we need to initialize OatFile data
3890 // (.data.bimg.rel.ro and .bss sections) needed for code execution.
3891 bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
Ian Rogers55256cb2017-12-21 17:07:11 -08003892 JavaVMExt* const vm = self->GetJniEnv()->GetVm();
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08003893 for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
3894 DexCacheData data = *it;
3895 if (self->IsJWeakCleared(data.weak_root)) {
3896 vm->DeleteWeakGlobalRef(self, data.weak_root);
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003897 it = dex_caches_.erase(it);
3898 } else {
Vladimir Markob066d432018-01-03 13:14:37 +00003899 if (initialize_oat_file_data &&
Vladimir Marko0eb882b2017-05-15 13:39:18 +01003900 it->dex_file->GetOatDexFile() != nullptr &&
3901 it->dex_file->GetOatDexFile()->GetOatFile() == oat_file) {
Vladimir Markob066d432018-01-03 13:14:37 +00003902 initialize_oat_file_data = false; // Already initialized.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01003903 }
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07003904 ++it;
3905 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07003906 }
Vladimir Markob066d432018-01-03 13:14:37 +00003907 if (initialize_oat_file_data) {
Vladimir Marko1cedb4a2019-02-06 14:13:28 +00003908 oat_file->InitializeRelocations();
Vladimir Marko0eb882b2017-05-15 13:39:18 +01003909 }
David Brazdila5c3a802019-03-08 14:59:41 +00003910 // Let hiddenapi assign a domain to the newly registered dex file.
3911 hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
3912
Vladimir Markocd556b02017-02-03 11:47:34 +00003913 jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08003914 DexCacheData data;
3915 data.weak_root = dex_cache_jweak;
3916 data.dex_file = dex_cache->GetDexFile();
Vladimir Markocd556b02017-02-03 11:47:34 +00003917 data.class_table = ClassTableForClassLoader(class_loader);
David Srbeckyafc60cd2018-12-05 11:59:31 +00003918 AddNativeDebugInfoForDex(self, data.dex_file);
Vladimir Markocd556b02017-02-03 11:47:34 +00003919 DCHECK(data.class_table != nullptr);
Mathieu Chartier72041a02017-07-14 18:23:25 -07003920 // Make sure the class table keeps the dex cache live. This case happens for the boot class
3921 // path dex caches without an image.
3922 data.class_table->InsertStrongRoot(dex_cache);
Andreas Gampe8a1a0f72020-03-03 16:07:45 -08003923 // Make sure that the dex cache holds the classloader live.
3924 dex_cache->SetClassLoader(class_loader);
Mathieu Chartier72041a02017-07-14 18:23:25 -07003925 if (class_loader != nullptr) {
3926 // Since we added a strong root to the class table, do the write barrier as required for
3927 // remembered sets and generational GCs.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07003928 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier72041a02017-07-14 18:23:25 -07003929 }
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08003930 dex_caches_.push_back(data);
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07003931}
3932
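// Decodes the weak global root stored in the DexCacheData; returns null if `data` is null or the
// DexCache has already been collected.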
Alex Light725da8f2020-02-19 14:46:33 -08003933ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
3934 return data != nullptr
3935 ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
Vladimir Markocd556b02017-02-03 11:47:34 +00003936 : nullptr;
3937}
3938
Alex Light725da8f2020-02-19 14:46:33 -08003939bool ClassLinker::IsSameClassLoader(
Vladimir Markocd556b02017-02-03 11:47:34 +00003940 ObjPtr<mirror::DexCache> dex_cache,
Alex Light725da8f2020-02-19 14:46:33 -08003941 const DexCacheData* data,
Vladimir Markocd556b02017-02-03 11:47:34 +00003942 ObjPtr<mirror::ClassLoader> class_loader) {
Alex Light725da8f2020-02-19 14:46:33 -08003943 CHECK(data != nullptr);
3944 DCHECK_EQ(dex_cache->GetDexFile(), data->dex_file);
3945 return data->class_table == ClassTableForClassLoader(class_loader);
Vladimir Markocd556b02017-02-03 11:47:34 +00003946}
3947
Alex Light07f06212017-06-01 14:01:43 -07003948void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
3949 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartiered4ee442018-06-05 14:23:35 -07003950 SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
Alex Light07f06212017-06-01 14:01:43 -07003951 Thread* self = Thread::Current();
3952 StackHandleScope<2> hs(self);
3953 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
3954 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
3955 const DexFile* dex_file = dex_cache->GetDexFile();
3956 DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
3957 if (kIsDebugBuild) {
Alex Light725da8f2020-02-19 14:46:33 -08003958 ReaderMutexLock mu(self, *Locks::dex_lock_);
3959 const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
3960 ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
Alex Light07f06212017-06-01 14:01:43 -07003961 DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache that's already "
3962 << "been registered on dex file " << dex_file->GetLocation();
3963 }
3964 ClassTable* table;
3965 {
3966 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
3967 table = InsertClassTableForClassLoader(h_class_loader.Get());
3968 }
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03003969 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
3970 // a thread holding the dex lock and blocking on a condition variable regarding
3971 // weak references access, and a thread blocking on the dex lock.
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03003972 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
Alex Light07f06212017-06-01 14:01:43 -07003973 WriterMutexLock mu(self, *Locks::dex_lock_);
3974 RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
3975 table->InsertStrongRoot(h_dex_cache.Get());
3976 if (h_class_loader.Get() != nullptr) {
3977 // Since we added a strong root to the class table, do the write barrier as required for
3978 // remembered sets and generational GCs.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07003979 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
Alex Light07f06212017-06-01 14:01:43 -07003980 }
3981}
3982
Alex Lightde7f8782020-02-24 10:14:22 -08003983static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
3984 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light725da8f2020-02-19 14:46:33 -08003985 self->ThrowNewExceptionF("Ljava/lang/InternalError;",
Alex Lightde7f8782020-02-24 10:14:22 -08003986 "Attempt to register dex file %s with multiple class loaders",
3987 dex_file.GetLocation().c_str());
Alex Light725da8f2020-02-19 14:46:33 -08003988}
3989
Vladimir Markocd556b02017-02-03 11:47:34 +00003990ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
3991 ObjPtr<mirror::ClassLoader> class_loader) {
Ian Rogers1f539342012-10-03 21:09:42 -07003992 Thread* self = Thread::Current();
Alex Light725da8f2020-02-19 14:46:33 -08003993 ObjPtr<mirror::DexCache> old_dex_cache;
3994 bool registered_with_another_class_loader = false;
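  // First look for an existing registration under the reader lock; a new DexCache is allocated
  // further below, outside of the dex lock, only if none is found.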
Brian Carlstrom47d237a2011-10-18 15:08:33 -07003995 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08003996 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08003997 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
3998 old_dex_cache = DecodeDexCacheLocked(self, old_data);
3999 if (old_dex_cache != nullptr) {
4000 if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
4001 return old_dex_cache;
4002 } else {
4003 // TODO: This is not very clean. Consider adding a way to request that exceptions be thrown
4004 // once it is safe to do so, to simplify this.
4005 registered_with_another_class_loader = true;
4006 }
4007 }
Vladimir Markocd556b02017-02-03 11:47:34 +00004008 }
Alex Light725da8f2020-02-19 14:46:33 -08004009 // We need to have released the dex_lock_ to allocate safely.
4010 if (registered_with_another_class_loader) {
4011 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4012 return nullptr;
Brian Carlstromaded5f72011-10-07 17:15:04 -07004013 }
Mathieu Chartiered4ee442018-06-05 14:23:35 -07004014 SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004015 LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
4016 DCHECK(linear_alloc != nullptr);
4017 ClassTable* table;
4018 {
4019 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4020 table = InsertClassTableForClassLoader(class_loader);
4021 }
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004022 // Don't alloc while holding the lock, since allocation may need to
4023 // suspend all threads and another thread may need the dex_lock_ to
4024 // get to a suspend point.
Vladimir Markocd556b02017-02-03 11:47:34 +00004025 StackHandleScope<3> hs(self);
4026 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
David Srbecky86d6cd52020-12-02 18:13:10 +00004027 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004028 {
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004029 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4030 // a thread holding the dex lock and blocking on a condition variable regarding
4031 // weak references access, and a thread blocking on the dex lock.
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004032 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
Andreas Gampecc1b5352016-12-01 16:58:38 -08004033 WriterMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004034 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4035 old_dex_cache = DecodeDexCacheLocked(self, old_data);
Andreas Gampefa4333d2017-02-14 11:10:34 -08004036 if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
Orion Hodsonb9b7d912021-02-24 09:24:47 +00004037 // Do InitializeNativeFields while holding the dex lock to make sure two threads don't call it
David Srbecky86d6cd52020-12-02 18:13:10 +00004038 // at the same time with the same dex cache. Since the .bss is shared, such a race can cause a
4039 // failing DCHECK that the arrays are null.
Orion Hodsonb9b7d912021-02-24 09:24:47 +00004040 h_dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
Vladimir Markocd556b02017-02-03 11:47:34 +00004041 RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004042 }
Alex Light725da8f2020-02-19 14:46:33 -08004043 if (old_dex_cache != nullptr) {
4044 // Another thread managed to initialize the dex cache faster, so use that DexCache.
4045 // If this thread encountered OOME, ignore it.
4046 DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
4047 self->ClearException();
4048 // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
4049 // dex_lock_.
4050 if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
4051 return old_dex_cache;
4052 } else {
4053 registered_with_another_class_loader = true;
4054 }
4055 }
Vladimir Markocd556b02017-02-03 11:47:34 +00004056 }
Alex Light725da8f2020-02-19 14:46:33 -08004057 if (registered_with_another_class_loader) {
4058 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4059 return nullptr;
Vladimir Markocd556b02017-02-03 11:47:34 +00004060 }
Andreas Gampefa4333d2017-02-14 11:10:34 -08004061 if (h_dex_cache == nullptr) {
Vladimir Markocd556b02017-02-03 11:47:34 +00004062 self->AssertPendingOOMException();
4063 return nullptr;
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004064 }
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004065 table->InsertStrongRoot(h_dex_cache.Get());
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004066 if (h_class_loader.Get() != nullptr) {
4067 // Since we added a strong root to the class table, do the write barrier as required for
4068 // remembered sets and generational GCs.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004069 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004070 }
Nicolas Geoffray1d4f0092020-08-07 14:01:05 +01004071 VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
Nicolas Geoffray4f6bb442021-06-02 18:05:51 +01004072 PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004073 return h_dex_cache.Get();
Brian Carlstromaded5f72011-10-07 17:15:04 -07004074}
4075
Vladimir Markocd556b02017-02-03 11:47:34 +00004076bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004077 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004078 return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004079}
4080
Vladimir Markocd556b02017-02-03 11:47:34 +00004081ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4082 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004083 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4084 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
Vladimir Markocd556b02017-02-03 11:47:34 +00004085 if (dex_cache != nullptr) {
4086 return dex_cache;
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07004087 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004088 // Failure, dump diagnostic and abort.
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -08004089 for (const DexCacheData& data : dex_caches_) {
Alex Light725da8f2020-02-19 14:46:33 -08004090 if (DecodeDexCacheLocked(self, &data) != nullptr) {
Andreas Gampe37c58462017-03-27 15:14:27 -07004091 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << data.dex_file->GetLocation();
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004092 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004093 }
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004094 LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
Alex Light725da8f2020-02-19 14:46:33 -08004095 << " " << &dex_file << " " << dex_cache_data->dex_file;
Ian Rogerse0a02da2014-12-02 14:10:53 -08004096 UNREACHABLE();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004097}
4098
Vladimir Markocd556b02017-02-03 11:47:34 +00004099ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4100 const DexFile* dex_file = dex_cache->GetDexFile();
4101 DCHECK(dex_file != nullptr);
4102 ReaderMutexLock mu(self, *Locks::dex_lock_);
4103 // Search assuming uniqueness of the dex file.
4104 for (const DexCacheData& data : dex_caches_) {
4105 // Avoid decoding (and read barriers) other unrelated dex caches.
4106 if (data.dex_file == dex_file) {
Alex Light725da8f2020-02-19 14:46:33 -08004107 ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
Vladimir Markocd556b02017-02-03 11:47:34 +00004108 if (registered_dex_cache != nullptr) {
4109 CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4110 return data.class_table;
4111 }
4112 }
4113 }
4114 return nullptr;
4115}
4116
Alex Light725da8f2020-02-19 14:46:33 -08004117const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
Vladimir Markocd556b02017-02-03 11:47:34 +00004118 // Search assuming uniqueness of the dex file.
4119 for (const DexCacheData& data : dex_caches_) {
4120 // Avoid decoding (and read barriers) other unrelated dex caches.
4121 if (data.dex_file == &dex_file) {
Alex Light725da8f2020-02-19 14:46:33 -08004122 return &data;
Vladimir Markocd556b02017-02-03 11:47:34 +00004123 }
4124 }
Alex Light725da8f2020-02-19 14:46:33 -08004125 return nullptr;
Vladimir Markocd556b02017-02-03 11:47:34 +00004126}
4127
Vladimir Marko70e2a762019-07-12 16:49:00 +01004128void ClassLinker::CreatePrimitiveClass(Thread* self,
4129 Primitive::Type type,
4130 ClassRoot primitive_root) {
Vladimir Markoacb906d2018-05-30 10:23:49 +01004131 ObjPtr<mirror::Class> primitive_class =
Mathieu Chartier6beced42016-11-15 15:51:31 -08004132 AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
Vladimir Marko70e2a762019-07-12 16:49:00 +01004133 CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
4134 // Do not hold lock on the primitive class object, the initialization of
4135 // primitive classes is done while the process is still single threaded.
Vladimir Markob68bb7a2020-03-17 10:55:25 +00004136 primitive_class->SetAccessFlagsDuringLinking(
Vladimir Marko70e2a762019-07-12 16:49:00 +01004137 kAccPublic | kAccFinal | kAccAbstract | kAccVerificationAttempted);
4138 primitive_class->SetPrimitiveType(type);
4139 primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4140 // Skip EnsureSkipAccessChecksMethods(). We can skip the verified status,
4141 // the kAccVerificationAttempted flag was added above, and there are no
4142 // methods that need the kAccSkipAccessChecks flag.
4143 DCHECK_EQ(primitive_class->NumMethods(), 0u);
Vladimir Markobf121912019-06-04 13:49:05 +01004144 // Primitive classes are initialized during single threaded startup, so visibly initialized.
4145 primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004146 const char* descriptor = Primitive::Descriptor(type);
Mathieu Chartier6beced42016-11-15 15:51:31 -08004147 ObjPtr<mirror::Class> existing = InsertClass(descriptor,
Vladimir Marko70e2a762019-07-12 16:49:00 +01004148 primitive_class,
Mathieu Chartier6beced42016-11-15 15:51:31 -08004149 ComputeModifiedUtf8Hash(descriptor));
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004150 CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
Vladimir Marko70e2a762019-07-12 16:49:00 +01004151 SetClassRoot(primitive_root, primitive_class);
Carl Shapiro565f5072011-07-10 13:39:43 -07004152}
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004153
Vladimir Marko02610552018-06-04 14:38:00 +01004154inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4155 return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4156}
4157
Brian Carlstrombe977852011-07-19 14:54:54 -07004158// Create an array class (i.e. the class object for the array, not the
4159// array itself). "descriptor" looks like "[C" or "[[[[B" or
4160// "[Ljava/lang/String;".
4161//
4162// If "descriptor" refers to an array of primitives, look up the
4163// primitive type's internally-generated class object.
4164//
Brian Carlstrom5b8e4c82011-09-18 01:38:59 -07004165// "class_loader" is the class loader of the class that's referring to
4166// us. It's used to ensure that we're looking for the element type in
4167// the right context. It does NOT become the class loader for the
4168// array class; that always comes from the base element class.
Brian Carlstrombe977852011-07-19 14:54:54 -07004169//
Mathieu Chartier2cebb242015-04-21 16:50:40 -07004170// Returns null with an exception raised on failure.
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004171ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4172 const char* descriptor,
4173 size_t hash,
4174 Handle<mirror::ClassLoader> class_loader) {
Brian Carlstrom5b8e4c82011-09-18 01:38:59 -07004175 // Identify the underlying component type
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004176 CHECK_EQ('[', descriptor[0]);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004177 StackHandleScope<2> hs(self);
Alex Lighte9f61032018-09-24 16:04:51 -07004178
4179 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
4180 // code to be executed. We put it up here so we can avoid all the allocations associated with
4181 // creating the class. This can happen with (eg) jit threads.
4182 if (!self->CanLoadClasses()) {
4183 // Make sure we don't try to load anything, potentially causing an infinite loop.
4184 ObjPtr<mirror::Throwable> pre_allocated =
4185 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4186 self->SetException(pre_allocated);
4187 return nullptr;
4188 }
4189
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07004190 MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
4191 class_loader)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004192 if (component_type == nullptr) {
Mathieu Chartierc0a9ea42014-02-03 16:36:49 -08004193 DCHECK(self->IsExceptionPending());
Andreas Gampedc13d7d2014-07-23 20:18:36 -07004194 // We need to accept erroneous classes as component types.
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08004195 const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
4196 component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004197 if (component_type == nullptr) {
Andreas Gampedc13d7d2014-07-23 20:18:36 -07004198 DCHECK(self->IsExceptionPending());
4199 return nullptr;
4200 } else {
4201 self->ClearException();
4202 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004203 }
Ian Rogers2d10b202014-05-12 19:15:18 -07004204 if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4205 ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4206 return nullptr;
4207 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004208 // See if the component type is already loaded. Array classes are
4209 // always associated with the class loader of their underlying
4210 // element type -- an array of Strings goes with the loader for
4211 // java/lang/String -- so we need to look for it there. (The
4212 // caller should have checked for the existence of the class
4213 // before calling here, but they did so with *their* class loader,
4214 // not the component type's loader.)
4215 //
4216 // If we find it, the caller adds "loader" to the class' initiating
4217 // loader list, which should prevent us from going through this again.
4218 //
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07004219 // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004220 // are the same, because our caller (FindClass) just did the
4221 // lookup. (Even if we get this wrong we still have correct behavior,
4222 // because we effectively do this lookup again when we add the new
4223 // class to the hash table --- necessary because of possible races with
4224 // other threads.)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004225 if (class_loader.Get() != component_type->GetClassLoader()) {
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00004226 ObjPtr<mirror::Class> new_class =
4227 LookupClass(self, descriptor, hash, component_type->GetClassLoader());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004228 if (new_class != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004229 return new_class;
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004230 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004231 }
Vladimir Marko70e2a762019-07-12 16:49:00 +01004232 // Core array classes, i.e. Object[], Class[], String[] and primitive
4233 // arrays, have special initialization and they should be found above.
4234 DCHECK(!component_type->IsObjectClass() ||
4235 // Guard from false positives for errors before setting superclass.
4236 component_type->IsErroneousUnresolved());
4237 DCHECK(!component_type->IsStringClass());
4238 DCHECK(!component_type->IsClassClass());
4239 DCHECK(!component_type->IsPrimitive());
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004240
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004241 // Fill out the fields in the Class.
4242 //
4243 // It is possible to execute some methods against arrays, because
4244 // all arrays are subclasses of java_lang_Object_, so we need to set
4245 // up a vtable. We can just point at the one in java_lang_Object_.
4246 //
4247 // Array classes are simple enough that we don't need to do a full
4248 // link step.
Vladimir Marko70e2a762019-07-12 16:49:00 +01004249 size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
4250 auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
4251 size_t usable_size)
4252 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markob68bb7a2020-03-17 10:55:25 +00004253 ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
Vladimir Marko70e2a762019-07-12 16:49:00 +01004254 mirror::Class::InitializeClassVisitor init_class(array_class_size);
4255 init_class(obj, usable_size);
4256 ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
4257 klass->SetComponentType(component_type.Get());
4258 // Do not hold lock for initialization, the fence issued after the visitor
4259 // returns ensures memory visibility together with the implicit consume
4260 // semantics (for all supported architectures) for any thread that loads
4261 // the array class reference from any memory locations afterwards.
4262 FinishArrayClassSetup(klass);
4263 };
4264 auto new_class = hs.NewHandle<mirror::Class>(
4265 AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004266 if (new_class == nullptr) {
Vladimir Marko70e2a762019-07-12 16:49:00 +01004267 self->AssertPendingOOMException();
4268 return nullptr;
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004269 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004270
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004271 ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
Mathieu Chartierc0a9ea42014-02-03 16:36:49 -08004272 if (existing == nullptr) {
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004273 // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4274 // duplicate events in case of races. Array classes don't really follow dedicated
4275 // load and prepare, anyways.
4276 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4277 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4278
Tamas Berghammer160e6df2016-01-05 14:29:02 +00004279 jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004280 return new_class.Get();
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004281 }
4282 // Another thread must have loaded the class after we
4283 // started but before we finished. Abandon what we've
4284 // done.
4285 //
4286 // (Yes, this happens.)
4287
Vladimir Markobcf17522018-06-01 13:14:32 +01004288 return existing;
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004289}
4290
Vladimir Marko9186b182018-11-06 14:55:54 +00004291ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4292 ClassRoot class_root;
Ian Rogers62f05122014-03-21 11:21:29 -07004293 switch (type) {
Vladimir Marko9186b182018-11-06 14:55:54 +00004294 case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4295 case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4296 case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4297 case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4298 case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4299 case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4300 case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4301 case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4302 case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
Ian Rogers62f05122014-03-21 11:21:29 -07004303 default:
Vladimir Marko9186b182018-11-06 14:55:54 +00004304 return nullptr;
Carl Shapiro744ad052011-08-06 15:53:36 -07004305 }
Vladimir Marko9186b182018-11-06 14:55:54 +00004306 return GetClassRoot(class_root, this);
4307}
4308
4309ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4310 ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4311 if (UNLIKELY(result == nullptr)) {
4312 std::string printable_type(PrintableChar(type));
4313 ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4314 }
4315 return result;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004316}
4317
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004318ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
4319 ObjPtr<mirror::Class> klass,
4320 size_t hash) {
Alex Lighte9f61032018-09-24 16:04:51 -07004321 DCHECK(Thread::Current()->CanLoadClasses());
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -08004322 if (VLOG_IS_ON(class_linker)) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004323 ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
Brian Carlstromae826982011-11-09 01:33:42 -08004324 std::string source;
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004325 if (dex_cache != nullptr) {
Brian Carlstromae826982011-11-09 01:33:42 -08004326 source += " from ";
4327 source += dex_cache->GetLocation()->ToModifiedUtf8();
4328 }
4329 LOG(INFO) << "Loaded class " << descriptor << source;
4330 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004331 {
4332 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Vladimir Marko0984e482019-03-27 16:41:41 +00004333 const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
Mathieu Chartier65975772016-08-05 10:46:36 -07004334 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004335 ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004336 if (existing != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004337 return existing;
Mathieu Chartier65975772016-08-05 10:46:36 -07004338 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004339 VerifyObject(klass);
4340 class_table->InsertWithHash(klass, hash);
4341 if (class_loader != nullptr) {
4342 // This is necessary because we need to have the card dirtied for remembered sets.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004343 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier65975772016-08-05 10:46:36 -07004344 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004345 if (log_new_roots_) {
Mathieu Chartier65975772016-08-05 10:46:36 -07004346 new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004347 }
4348 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004349 if (kIsDebugBuild) {
4350 // Test that copied methods correctly can find their holder.
4351 for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
4352 CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
4353 }
Mathieu Chartier893263b2014-03-04 11:07:42 -08004354 }
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004355 return nullptr;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004356}
4357
Vladimir Marko1998cd02017-01-13 13:02:58 +00004358void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004359 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4360 DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4361 if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4362 new_bss_roots_boot_oat_files_.push_back(oat_file);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004363 }
4364}
4365
Alex Lighte64300b2015-12-15 15:02:47 -08004366// TODO This should really be in mirror::Class.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004367void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
Alex Lighte64300b2015-12-15 15:02:47 -08004368 LengthPrefixedArray<ArtMethod>* new_methods) {
4369 klass->SetMethodsPtrUnchecked(new_methods,
4370 klass->NumDirectMethods(),
4371 klass->NumDeclaredVirtualMethods());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004372 // Need to mark the card so that the remembered sets and mod union tables get updated.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004373 WriteBarrier::ForEveryFieldWrite(klass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004374}
4375
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004376ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4377 const char* descriptor,
4378 ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2ff3b972017-06-05 18:14:53 -07004379 return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4380}
4381
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004382ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4383 const char* descriptor,
4384 size_t hash,
4385 ObjPtr<mirror::ClassLoader> class_loader) {
Vladimir Marko1a1de672016-10-13 12:53:15 +01004386 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4387 ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4388 if (class_table != nullptr) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004389 ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
Vladimir Marko1a1de672016-10-13 12:53:15 +01004390 if (result != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004391 return result;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004392 }
Sameer Abu Asal2c6de222013-05-02 17:38:59 -07004393 }
Vladimir Marko1a1de672016-10-13 12:53:15 +01004394 return nullptr;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004395}
4396
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004397class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4398 public:
Igor Murashkin2ffb7032017-11-08 13:35:21 -08004399 MoveClassTableToPreZygoteVisitor() {}
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004400
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004401 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004402 REQUIRES(Locks::classlinker_classes_lock_)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004403 REQUIRES_SHARED(Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004404 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -07004405 if (class_table != nullptr) {
4406 class_table->FreezeSnapshot();
4407 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07004408 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004409};
4410
4411void ClassLinker::MoveClassTableToPreZygote() {
4412 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Andreas Gampe2af99022017-04-25 08:32:59 -07004413 boot_class_table_->FreezeSnapshot();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004414 MoveClassTableToPreZygoteVisitor visitor;
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07004415 VisitClassLoaders(&visitor);
Mathieu Chartierc2e20622014-11-03 11:41:47 -08004416}
4417
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004418// Look up classes by hash and descriptor and put all matching ones in the result array.
4419class LookupClassesVisitor : public ClassLoaderVisitor {
4420 public:
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004421 LookupClassesVisitor(const char* descriptor,
4422 size_t hash,
4423 std::vector<ObjPtr<mirror::Class>>* result)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004424 : descriptor_(descriptor),
4425 hash_(hash),
4426 result_(result) {}
4427
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004428 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004429 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004430 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004431 ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004432 // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4433 if (klass != nullptr && klass->GetClassLoader() == class_loader) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004434 result_->push_back(klass);
4435 }
4436 }
4437
4438 private:
4439 const char* const descriptor_;
4440 const size_t hash_;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004441 std::vector<ObjPtr<mirror::Class>>* const result_;
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004442};
4443
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004444void ClassLinker::LookupClasses(const char* descriptor,
4445 std::vector<ObjPtr<mirror::Class>>& result) {
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004446 result.clear();
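  // Check the boot class table first, then visit each registered class loader's table.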
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004447 Thread* const self = Thread::Current();
4448 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004449 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Andreas Gampe2af99022017-04-25 08:32:59 -07004450 ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004451 if (klass != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004452 DCHECK(klass->GetClassLoader() == nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004453 result.push_back(klass);
4454 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004455 LookupClassesVisitor visitor(descriptor, hash, &result);
4456 VisitClassLoaders(&visitor);
Elliott Hughes6fa602d2011-12-02 17:54:25 -08004457}
4458
Alex Lightf1f10492015-10-07 16:08:36 -07004459bool ClassLinker::AttemptSupertypeVerification(Thread* self,
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004460 verifier::VerifierDeps* verifier_deps,
Alex Lightf1f10492015-10-07 16:08:36 -07004461 Handle<mirror::Class> klass,
4462 Handle<mirror::Class> supertype) {
4463 DCHECK(self != nullptr);
Andreas Gampefa4333d2017-02-14 11:10:34 -08004464 DCHECK(klass != nullptr);
4465 DCHECK(supertype != nullptr);
Alex Lightf1f10492015-10-07 16:08:36 -07004466
Alex Lightf1f10492015-10-07 16:08:36 -07004467 if (!supertype->IsVerified() && !supertype->IsErroneous()) {
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004468 VerifyClass(self, verifier_deps, supertype);
Alex Lightf1f10492015-10-07 16:08:36 -07004469 }
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004470
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004471 if (supertype->IsVerified()
4472 || supertype->ShouldVerifyAtRuntime()
4473 || supertype->IsVerifiedNeedsAccessChecks()) {
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004474 // The supertype is either verified, or we soft failed at AOT time.
4475 DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
Alex Lightf1f10492015-10-07 16:08:36 -07004476 return true;
4477 }
4478 // If we got this far then we have a hard failure.
4479 std::string error_msg =
4480 StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
David Sehr709b0702016-10-13 09:12:37 -07004481 klass->PrettyDescriptor().c_str(),
4482 supertype->PrettyDescriptor().c_str());
Alex Lightf1f10492015-10-07 16:08:36 -07004483 LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
Andreas Gampe884f3b82016-03-30 19:52:58 -07004484 StackHandleScope<1> hs(self);
Alex Lightf1f10492015-10-07 16:08:36 -07004485 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004486 if (cause != nullptr) {
Alex Lightf1f10492015-10-07 16:08:36 -07004487 // Set during VerifyClass call (if at all).
4488 self->ClearException();
4489 }
4490 // Change into a verify error.
4491 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
Andreas Gampefa4333d2017-02-14 11:10:34 -08004492 if (cause != nullptr) {
Alex Lightf1f10492015-10-07 16:08:36 -07004493 self->GetException()->SetCause(cause.Get());
4494 }
4495 ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
4496 if (Runtime::Current()->IsAotCompiler()) {
4497 Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
4498 }
Andreas Gampe884f3b82016-03-30 19:52:58 -07004499 // Need to grab the lock to change status.
4500 ObjectLock<mirror::Class> super_lock(self, klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00004501 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Alex Lightf1f10492015-10-07 16:08:36 -07004502 return false;
4503}
4504
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004505verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
4506 verifier::VerifierDeps* verifier_deps,
4507 Handle<mirror::Class> klass,
4508 verifier::HardFailLogMode log_level) {
Andreas Gampe884f3b82016-03-30 19:52:58 -07004509 {
4510 // TODO: assert that the monitor on the Class is held
4511 ObjectLock<mirror::Class> lock(self, klass);
Elliott Hughesd9c67be2012-02-02 19:54:06 -08004512
Andreas Gampe884f3b82016-03-30 19:52:58 -07004513 // Is somebody verifying this now?
Vladimir Marko2c64a832018-01-04 11:31:56 +00004514 ClassStatus old_status = klass->GetStatus();
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004515 while (old_status == ClassStatus::kVerifying) {
Andreas Gampe884f3b82016-03-30 19:52:58 -07004516 lock.WaitIgnoringInterrupts();
Mathieu Chartier5ef70202017-06-29 10:45:10 -07004517      // WaitIgnoringInterrupts can still receive an interrupt and return early; in this
4518 // case we may see the same status again. b/62912904. This is why the check is
4519 // greater or equal.
4520 CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
David Sehr709b0702016-10-13 09:12:37 -07004521 << "Class '" << klass->PrettyClass()
4522 << "' performed an illegal verification state transition from " << old_status
4523 << " to " << klass->GetStatus();
Andreas Gampe884f3b82016-03-30 19:52:58 -07004524 old_status = klass->GetStatus();
4525 }
jeffhao98eacac2011-09-14 16:11:53 -07004526
Andreas Gampe884f3b82016-03-30 19:52:58 -07004527 // The class might already be erroneous, for example at compile time if we attempted to verify
4528 // this class as a parent to another.
4529 if (klass->IsErroneous()) {
4530 ThrowEarlierClassFailure(klass.Get());
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004531 return verifier::FailureKind::kHardFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004532 }
Brian Carlstrom9b5ee882012-02-28 09:48:54 -08004533
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004534 // Don't attempt to re-verify if already verified.
Andreas Gampe884f3b82016-03-30 19:52:58 -07004535 if (klass->IsVerified()) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004536 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Nicolas Geoffray80789962021-04-30 16:50:39 +01004537 if (verifier_deps != nullptr &&
4538 verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
4539 !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
4540 !Runtime::Current()->IsAotCompiler()) {
4541 // If the klass is verified, but `verifier_deps` did not record it, this
4542 // means we are running background verification of a secondary dex file.
4543 // Re-run the verifier to populate `verifier_deps`.
4544 // No need to run the verification when running on the AOT Compiler, as
4545 // the driver handles those multithreaded cases already.
4546 std::string error_msg;
4547 verifier::FailureKind failure =
4548 PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
4549 // We could have soft failures, so just check that we don't have a hard
4550 // failure.
4551 DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
4552 }
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004553 return verifier::FailureKind::kNoFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004554 }
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004555
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004556 if (klass->IsVerifiedNeedsAccessChecks()) {
4557 if (!Runtime::Current()->IsAotCompiler()) {
4558 // Mark the class as having a verification attempt to avoid re-running
4559 // the verifier and avoid calling EnsureSkipAccessChecksMethods.
4560 klass->SetVerificationAttempted();
4561 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4562 }
4563 return verifier::FailureKind::kAccessChecksFailure;
4564 }
4565
Nicolas Geoffray7cc3ae52017-03-07 14:33:37 +00004566 // For AOT, don't attempt to re-verify if we have already found we should
4567 // verify at runtime.
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004568 if (klass->ShouldVerifyAtRuntime()) {
4569 CHECK(Runtime::Current()->IsAotCompiler());
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004570 return verifier::FailureKind::kSoftFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004571 }
jeffhao98eacac2011-09-14 16:11:53 -07004572
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004573 DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
4574 mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);
Andreas Gampe884f3b82016-03-30 19:52:58 -07004575
4576 // Skip verification if disabled.
4577 if (!Runtime::Current()->IsVerificationEnabled()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00004578 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Andreas Gampecc1b5352016-12-01 16:58:38 -08004579 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004580 return verifier::FailureKind::kNoFailure;
Andreas Gampe884f3b82016-03-30 19:52:58 -07004581 }
Jeff Hao4a200f52014-04-01 14:58:49 -07004582 }
4583
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004584 VLOG(class_linker) << "Beginning verification for class: "
4585 << klass->PrettyDescriptor()
4586 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4587
Ian Rogers9ffb0392012-09-10 11:56:50 -07004588 // Verify super class.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004589 StackHandleScope<2> hs(self);
Alex Lightf1f10492015-10-07 16:08:36 -07004590 MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
4591 // If we have a superclass and we get a hard verification failure we can return immediately.
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004592 if (supertype != nullptr &&
4593 !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
Alex Lightf1f10492015-10-07 16:08:36 -07004594 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004595 return verifier::FailureKind::kHardFailure;
Alex Lightf1f10492015-10-07 16:08:36 -07004596 }
Ian Rogers1c5eb702012-02-01 09:18:34 -08004597
Alex Lightf1f10492015-10-07 16:08:36 -07004598 // Verify all default super-interfaces.
4599 //
4600 // (1) Don't bother if the superclass has already had a soft verification failure.
4601 //
4602 // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
4603 // recursive initialization by themselves. This is because when an interface is initialized
4604 // directly it must not initialize its superinterfaces. We are allowed to verify regardless
4605  //     but choose not to for an optimization. If the interface is being verified due to a class
4606 // initialization (which would need all the default interfaces to be verified) the class code
4607 // will trigger the recursive verification anyway.
Andreas Gampefa4333d2017-02-14 11:10:34 -08004608 if ((supertype == nullptr || supertype->IsVerified()) // See (1)
Alex Lightf1f10492015-10-07 16:08:36 -07004609 && !klass->IsInterface()) { // See (2)
4610 int32_t iftable_count = klass->GetIfTableCount();
4611 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
4612    // Loop through all interfaces this class has defined. The order doesn't matter.
4613 for (int32_t i = 0; i < iftable_count; i++) {
4614 iface.Assign(klass->GetIfTable()->GetInterface(i));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004615 DCHECK(iface != nullptr);
Alex Lightf1f10492015-10-07 16:08:36 -07004616 // We only care if we have default interfaces and can skip if we are already verified...
4617 if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
4618 continue;
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004619 } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
Alex Lightf1f10492015-10-07 16:08:36 -07004620 // We had a hard failure while verifying this interface. Just return immediately.
4621 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004622 return verifier::FailureKind::kHardFailure;
Alex Lightf1f10492015-10-07 16:08:36 -07004623 } else if (UNLIKELY(!iface->IsVerified())) {
4624 // We softly failed to verify the iface. Stop checking and clean up.
4625 // Put the iface into the supertype handle so we know what caused us to fail.
4626 supertype.Assign(iface.Get());
4627 break;
Ian Rogers1c5eb702012-02-01 09:18:34 -08004628 }
Ian Rogers1c5eb702012-02-01 09:18:34 -08004629 }
4630 }
4631
Alex Lightf1f10492015-10-07 16:08:36 -07004632 // At this point if verification failed, then supertype is the "first" supertype that failed
4633 // verification (without a specific order). If verification succeeded, then supertype is either
4634 // null or the original superclass of klass and is verified.
Andreas Gampefa4333d2017-02-14 11:10:34 -08004635 DCHECK(supertype == nullptr ||
Alex Lightf1f10492015-10-07 16:08:36 -07004636 supertype.Get() == klass->GetSuperClass() ||
4637 !supertype->IsVerified());
4638
Elliott Hughes634eb2e2012-03-22 16:06:28 -07004639 // Try to use verification information from the oat file, otherwise do runtime verification.
Ian Rogers4445a7e2012-10-05 17:19:13 -07004640 const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
Vladimir Marko2c64a832018-01-04 11:31:56 +00004641 ClassStatus oat_file_class_status(ClassStatus::kNotReady);
Nicolas Geoffray6df45112021-02-07 21:51:58 +00004642 bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004643
4644 VLOG(class_linker) << "Class preverified status for class "
4645 << klass->PrettyDescriptor()
4646 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4647 << ": "
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004648 << preverified
4649                     << " (" << oat_file_class_status << ")";
Bharadwaj Kalandhabhatta271c1e12017-06-27 11:14:49 -07004650
Nicolas Geoffray44dc8a32021-06-21 15:23:49 +01004651 // If the oat file says the class had an error, re-run the verifier. That way we will either:
4652 // 1) Be successful at runtime, or
4653 // 2) Get a precise error message.
Vladimir Marko72ab6842017-01-20 19:32:50 +00004654 DCHECK(!mirror::Class::IsErroneous(oat_file_class_status) || !preverified);
Andreas Gampeec6e6c12015-11-05 20:39:56 -08004655
Ian Rogers62d6c772013-02-27 08:32:07 -08004656 std::string error_msg;
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004657 verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
jeffhaof1e6b7c2012-06-05 18:33:30 -07004658 if (!preverified) {
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004659 verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
jeffhaof1e6b7c2012-06-05 18:33:30 -07004660 }
Andreas Gampe884f3b82016-03-30 19:52:58 -07004661
4662 // Verification is done, grab the lock again.
4663 ObjectLock<mirror::Class> lock(self, klass);
4664
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004665 if (preverified || verifier_failure != verifier::FailureKind::kHardFailure) {
4666 if (!preverified && verifier_failure != verifier::FailureKind::kNoFailure) {
David Sehr709b0702016-10-13 09:12:37 -07004667 VLOG(class_linker) << "Soft verification failure in class "
4668 << klass->PrettyDescriptor()
4669 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4670 << " because: " << error_msg;
Ian Rogers529781d2012-07-23 17:24:29 -07004671 }
Ian Rogers1f539342012-10-03 21:09:42 -07004672 self->AssertNoPendingException();
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004673 // Make sure all classes referenced by catch blocks are resolved.
Alex Light5a559862016-01-29 12:24:48 -08004674 ResolveClassExceptionHandlerTypes(klass);
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004675 if (verifier_failure == verifier::FailureKind::kNoFailure) {
Alex Lightf1f10492015-10-07 16:08:36 -07004676      // Even though there were no verifier failures, we need to respect whether the super-class and
4677      // super-default-interfaces were verified or require runtime reverification.
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004678 if (supertype == nullptr
4679 || supertype->IsVerified()
4680 || supertype->IsVerifiedNeedsAccessChecks()) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00004681 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07004682 } else {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004683 CHECK(Runtime::Current()->IsAotCompiler());
Vladimir Marko2c64a832018-01-04 11:31:56 +00004684 CHECK_EQ(supertype->GetStatus(), ClassStatus::kRetryVerificationAtRuntime);
4685 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
Alex Lightf1f10492015-10-07 16:08:36 -07004686 // Pretend a soft failure occurred so that we don't consider the class verified below.
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004687 verifier_failure = verifier::FailureKind::kSoftFailure;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07004688 }
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004689 } else {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004690 CHECK(verifier_failure == verifier::FailureKind::kSoftFailure ||
Nicolas Geoffrayd1728bf2021-01-12 14:02:29 +00004691 verifier_failure == verifier::FailureKind::kTypeChecksFailure ||
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004692 verifier_failure == verifier::FailureKind::kAccessChecksFailure);
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004693 // Soft failures at compile time should be retried at runtime. Soft
4694 // failures at runtime will be handled by slow paths in the generated
4695 // code. Set status accordingly.
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08004696 if (Runtime::Current()->IsAotCompiler()) {
Nicolas Geoffrayd1728bf2021-01-12 14:02:29 +00004697 if (verifier_failure == verifier::FailureKind::kSoftFailure ||
4698 verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004699 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4700 } else {
4701 mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
4702 }
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004703 } else {
Vladimir Marko2c64a832018-01-04 11:31:56 +00004704 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
Igor Murashkindf707e42016-02-02 16:56:50 -08004705 // As this is a fake verified status, make sure the methods are _not_ marked
4706 // kAccSkipAccessChecks later.
4707 klass->SetVerificationAttempted();
jeffhaoe4f0b2a2012-08-30 11:18:57 -07004708 }
4709 }
jeffhao5cfd6fb2011-09-27 13:54:29 -07004710 } else {
David Sehr709b0702016-10-13 09:12:37 -07004711 VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
Andreas Gampeec6e6c12015-11-05 20:39:56 -08004712 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4713 << " because: " << error_msg;
Ian Rogers00f7d0e2012-07-19 15:28:27 -07004714 self->AssertNoPendingException();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004715 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
Vladimir Marko2c64a832018-01-04 11:31:56 +00004716 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
jeffhao5cfd6fb2011-09-27 13:54:29 -07004717 }
Andreas Gampe6d7abbd2017-04-24 13:19:09 -07004718 if (preverified || verifier_failure == verifier::FailureKind::kNoFailure) {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004719 if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks ||
4720 UNLIKELY(Runtime::Current()->IsVerificationSoftFail())) {
Igor Murashkindf707e42016-02-02 16:56:50 -08004721 // Never skip access checks if the verification soft fail is forced.
4722 // Mark the class as having a verification attempt to avoid re-running the verifier.
4723 klass->SetVerificationAttempted();
4724 } else {
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004725 // Class is verified so we don't need to do any access check on its methods.
4726 // Let the interpreter know it by setting the kAccSkipAccessChecks flag onto each
4727 // method.
4728      // Note: we get here both during compilation and at runtime. When we set the
4729 // kAccSkipAccessChecks flag when compiling image classes, the flag is recorded
4730 // in the image and is set when loading the image.
Andreas Gampecc1b5352016-12-01 16:58:38 -08004731 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Igor Murashkindf707e42016-02-02 16:56:50 -08004732 }
Andreas Gampe48498592014-09-10 19:48:05 -07004733 }
Nicolas Geoffray486dda02017-09-11 14:15:52 +01004734 // Done verifying. Notify the compiler about the verification status, in case the class
4735  // was verified implicitly (e.g. the super class of a compiled class).
4736 if (Runtime::Current()->IsAotCompiler()) {
4737 Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
4738 ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
4739 }
Nicolas Geoffray08025182016-10-25 17:20:18 +01004740 return verifier_failure;
Andreas Gampe48498592014-09-10 19:48:05 -07004741}
4742
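// Minimal usage sketch (hypothetical caller, not part of this file; `class_linker`,
// `self` and `klass` are assumed locals): verify a resolved class and treat only a
// hard failure as fatal, mirroring the call made from InitializeClass() below.
//
//   StackHandleScope<1> hs(self);
//   Handle<mirror::Class> h_klass(hs.NewHandle(klass));
//   verifier::FailureKind kind =
//       class_linker->VerifyClass(self, /*verifier_deps=*/ nullptr, h_klass);
//   if (kind == verifier::FailureKind::kHardFailure) {
//     DCHECK(self->IsExceptionPending());  // A VerifyError was thrown on `klass`.
//   }
//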
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004743verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004744 verifier::VerifierDeps* verifier_deps,
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004745 Handle<mirror::Class> klass,
4746 verifier::HardFailLogMode log_level,
4747 std::string* error_msg) {
4748 Runtime* const runtime = Runtime::Current();
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004749 return verifier::ClassVerifier::VerifyClass(self,
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004750 verifier_deps,
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004751 klass.Get(),
4752 runtime->GetCompilerCallbacks(),
4753 runtime->IsAotCompiler(),
4754 log_level,
4755 Runtime::Current()->GetTargetSdkVersion(),
4756 error_msg);
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004757}
4758
Nicolas Geoffray6df45112021-02-07 21:51:58 +00004759bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
4760 const DexFile& dex_file,
4761 Handle<mirror::Class> klass,
Vladimir Marko2c64a832018-01-04 11:31:56 +00004762 ClassStatus& oat_file_class_status) {
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004763 // If we're compiling, we can only verify the class using the oat file if
4764 // we are not compiling the image or if the class we're verifying is not part of
Andreas Gampee9934582018-01-19 21:23:04 -08004765 // the compilation unit (app - dependencies). We will let the compiler callback
4766 // tell us about the latter.
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08004767 if (Runtime::Current()->IsAotCompiler()) {
Andreas Gampee9934582018-01-19 21:23:04 -08004768 CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004769 // We are compiling an app (not the image).
Nicolas Geoffray6df45112021-02-07 21:51:58 +00004770 if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004771 return false;
4772 }
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004773 }
Anwar Ghuloum044d2832013-07-17 15:22:31 -07004774
Andreas Gampeb40d3612018-06-26 15:49:42 -07004775 const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004776 // In case we run without an image there won't be a backing oat file.
Mathieu Chartier1b868492016-11-16 16:22:37 -08004777 if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
Anwar Ghuloumad256bb2013-07-18 14:58:55 -07004778 return false;
4779 }
4780
Ian Rogers8b2c0b92013-09-19 02:56:49 -07004781 uint16_t class_def_index = klass->GetDexClassDefIndex();
Vladimir Markod3c5beb2014-04-11 16:32:51 +01004782 oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
Vladimir Marko2c64a832018-01-04 11:31:56 +00004783 if (oat_file_class_status >= ClassStatus::kVerified) {
Mathieu Chartiera079e3a2016-03-16 19:08:31 -07004784 return true;
4785 }
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004786 if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
4787    // We return that the class has already been verified, and the caller should
4788 // check the class status to ensure we run with access checks.
4789 return true;
4790 }
Nicolas Geoffray6df45112021-02-07 21:51:58 +00004791
4792 // Check the class status with the vdex file.
4793 const OatFile* oat_file = oat_dex_file->GetOatFile();
4794 if (oat_file != nullptr) {
Nicolas Geoffray44dc8a32021-06-21 15:23:49 +01004795 ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
4796 if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
4797 oat_file_class_status = vdex_status;
Nicolas Geoffray6df45112021-02-07 21:51:58 +00004798 return true;
4799 }
4800 }
4801
Mathieu Chartiera079e3a2016-03-16 19:08:31 -07004802 // If we only verified a subset of the classes at compile time, we can end up with classes that
4803 // were resolved by the verifier.
Vladimir Marko2c64a832018-01-04 11:31:56 +00004804 if (oat_file_class_status == ClassStatus::kResolved) {
Mathieu Chartiera079e3a2016-03-16 19:08:31 -07004805 return false;
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004806 }
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01004807 // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
4808 CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
4809 << klass->PrettyClass() << " " << dex_file.GetLocation();
4810
Vladimir Marko72ab6842017-01-20 19:32:50 +00004811 if (mirror::Class::IsErroneous(oat_file_class_status)) {
Nicolas Geoffray44dc8a32021-06-21 15:23:49 +01004812 // Compile time verification failed with a hard error. We'll re-run
4813 // verification, which might be successful at runtime.
jeffhao1ac29442012-03-26 11:37:32 -07004814 return false;
4815 }
Vladimir Marko2c64a832018-01-04 11:31:56 +00004816 if (oat_file_class_status == ClassStatus::kNotReady) {
Ian Rogersc4762272012-02-01 15:55:55 -08004817 // Status is uninitialized if we couldn't determine the status at compile time, for example,
4818 // not loading the class.
4819    // TODO: when the verifier doesn't rely on Class-es failing to resolve/load, the type hierarchy
4820    // isn't a problem and this case shouldn't occur.
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004821 return false;
4822 }
Ian Rogers1ff3c982014-08-12 02:30:58 -07004823 std::string temp;
Elliott Hughes634eb2e2012-03-22 16:06:28 -07004824 LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
David Sehr709b0702016-10-13 09:12:37 -07004825 << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
Ian Rogers1ff3c982014-08-12 02:30:58 -07004826 << klass->GetDescriptor(&temp);
Ian Rogerse0a02da2014-12-02 14:10:53 -08004827 UNREACHABLE();
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004828}
4829
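// Summary of how a recorded ClassStatus is interpreted here (sketch derived from
// the checks in VerifyClassUsingOatFile() and VerifyClass() above):
//
//   status >= kVerified                  -> trust the oat/vdex data, skip the verifier
//   status >= kVerifiedNeedsAccessChecks -> trust it, but keep access checks enabled
//   status == kResolved or kNotReady     -> run the verifier at runtime
//   erroneous status                     -> re-run the verifier for a precise error
//   kRetryVerificationAtRuntime          -> never expected in an oat file (CHECK above)
//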
Alex Light5a559862016-01-29 12:24:48 -08004830void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
Alex Light51a64d52015-12-17 13:55:59 -08004831 for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
Alex Light5a559862016-01-29 12:24:48 -08004832 ResolveMethodExceptionHandlerTypes(&method);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004833 }
4834}
4835
Alex Light5a559862016-01-29 12:24:48 -08004836void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004837 // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
David Sehr0225f8e2018-01-31 08:52:24 +00004838 CodeItemDataAccessor accessor(method->DexInstructionData());
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004839 if (!accessor.HasCodeItem()) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004840 return; // native or abstract method
4841 }
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004842 if (accessor.TriesSize() == 0) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004843 return; // nothing to process
4844 }
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004845 const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004846 uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004847 for (uint32_t idx = 0; idx < handlers_size; idx++) {
4848 CatchHandlerIterator iterator(handlers_ptr);
4849 for (; iterator.HasNext(); iterator.Next()) {
4850      // Ensure exception types are resolved so that they don't need resolution to be delivered;
4851      // unresolved exception types will be ignored by exception delivery.
Andreas Gampea5b09a62016-11-17 15:21:22 -08004852 if (iterator.GetHandlerTypeIndex().IsValid()) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004853 ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004854 if (exception_type == nullptr) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004855 DCHECK(Thread::Current()->IsExceptionPending());
4856 Thread::Current()->ClearException();
4857 }
4858 }
4859 }
4860 handlers_ptr = iterator.EndDataPointer();
4861 }
4862}
4863
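// For reference, the DEX data being walked above (encoded_catch_handler_list, per
// the Dalvik executable format):
//
//   handlers_size: uleb128                 // number of encoded_catch_handler entries
//   encoded_catch_handler[handlers_size]:
//     size: sleb128                        // |size| typed handlers; size <= 0 adds a catch-all
//     handlers[|size|]: { type_idx: uleb128, addr: uleb128 }
//     catch_all_addr: uleb128              // only present when size <= 0
//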
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004864ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
4865 jstring name,
4866 jobjectArray interfaces,
4867 jobject loader,
4868 jobjectArray methods,
4869 jobjectArray throws) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07004870 Thread* self = soa.Self();
Alex Lighte9f61032018-09-24 16:04:51 -07004871
4872  // This is to prevent the calls to ClassLoad and ClassPrepare, which can cause java/user-supplied
4873 // code to be executed. We put it up here so we can avoid all the allocations associated with
4874  // creating the class. This can happen with (e.g.) jit-threads.
4875 if (!self->CanLoadClasses()) {
4876 // Make sure we don't try to load anything, potentially causing an infinite loop.
4877 ObjPtr<mirror::Throwable> pre_allocated =
4878 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4879 self->SetException(pre_allocated);
4880 return nullptr;
4881 }
4882
Alex Light133987d2020-03-26 19:22:12 +00004883 StackHandleScope<12> hs(self);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004884 MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01004885 AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004886 if (temp_klass == nullptr) {
Ian Rogersa436fde2013-08-27 23:34:06 -07004887 CHECK(self->IsExceptionPending()); // OOME.
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004888 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07004889 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004890 DCHECK(temp_klass->GetClass() != nullptr);
4891 temp_klass->SetObjectSize(sizeof(mirror::Proxy));
Igor Murashkindf707e42016-02-02 16:56:50 -08004892 // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
4893 // the methods.
Vladimir Markob68bb7a2020-03-17 10:55:25 +00004894 temp_klass->SetAccessFlagsDuringLinking(
4895 kAccClassIsProxy | kAccPublic | kAccFinal | kAccVerificationAttempted);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004896 temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
4897 DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
4898 temp_klass->SetName(soa.Decode<mirror::String>(name));
Vladimir Markob4eb1b12018-05-24 11:09:38 +01004899 temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
Mathieu Chartier6beced42016-11-15 15:51:31 -08004900 // Object has an empty iftable, copy it for that reason.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01004901 temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
Vladimir Marko2c64a832018-01-04 11:31:56 +00004902 mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
Vladimir Marko3892e622019-03-15 15:22:18 +00004903 std::string storage;
4904 const char* descriptor = temp_klass->GetDescriptor(&storage);
4905 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07004906
Mathieu Chartierd57d4542015-10-14 10:55:30 -07004907 // Needs to be before we insert the class so that the allocator field is set.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004908 LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
Mathieu Chartierd57d4542015-10-14 10:55:30 -07004909
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07004910 // Insert the class before loading the fields as the field roots
4911 // (ArtField::declaring_class_) are only visited from the class
4912 // table. There can't be any suspend points between inserting the
4913 // class and setting the field arrays below.
Vladimir Marko3892e622019-03-15 15:22:18 +00004914 ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07004915 CHECK(existing == nullptr);
Ian Rogersc2b44472011-12-14 21:17:17 -08004916
Elliott Hughes2ed52c42012-03-21 16:56:56 -07004917 // Instance fields are inherited, but we add a couple of static fields...
Mathieu Chartierc7853442015-03-27 14:35:38 -07004918 const size_t num_fields = 2;
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07004919 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004920 temp_klass->SetSFieldsPtr(sfields);
Mathieu Chartierc7853442015-03-27 14:35:38 -07004921
Elliott Hughes2ed52c42012-03-21 16:56:56 -07004922 // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
4923 // our proxy, so Class.getInterfaces doesn't return the flattened set.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004924 ArtField& interfaces_sfield = sfields->At(0);
4925 interfaces_sfield.SetDexFieldIndex(0);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004926 interfaces_sfield.SetDeclaringClass(temp_klass.Get());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004927 interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
Mathieu Chartierc7853442015-03-27 14:35:38 -07004928
Elliott Hughes2ed52c42012-03-21 16:56:56 -07004929 // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004930 ArtField& throws_sfield = sfields->At(1);
4931 throws_sfield.SetDexFieldIndex(1);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004932 throws_sfield.SetDeclaringClass(temp_klass.Get());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004933 throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
Jesse Wilson95caa792011-10-12 18:14:17 -04004934
Ian Rogers466bb252011-10-14 03:29:56 -07004935 // Proxies have 1 direct method, the constructor
Alex Lighte64300b2015-12-15 15:02:47 -08004936 const size_t num_direct_methods = 1;
Jesse Wilson95caa792011-10-12 18:14:17 -04004937
Alex Light133987d2020-03-26 19:22:12 +00004938 // The array we get passed contains all methods, including private and static
4939 // ones that aren't proxied. We need to filter those out since only interface
4940 // methods (non-private & virtual) are actually proxied.
4941 Handle<mirror::ObjectArray<mirror::Method>> h_methods =
4942 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
Vladimir Marko679730e2018-05-25 15:06:48 +01004943 DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
David Sehr709b0702016-10-13 09:12:37 -07004944 << mirror::Class::PrettyClass(h_methods->GetClass());
Alex Light133987d2020-03-26 19:22:12 +00004945 // List of the actual virtual methods this class will have.
4946 std::vector<ArtMethod*> proxied_methods;
4947 std::vector<size_t> proxied_throws_idx;
4948 proxied_methods.reserve(h_methods->GetLength());
4949 proxied_throws_idx.reserve(h_methods->GetLength());
4950  // Keep only the non-private virtual methods.
4951 for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
4952 ArtMethod* m = mirror->GetArtMethod();
4953 if (!m->IsPrivate() && !m->IsStatic()) {
4954 proxied_methods.push_back(m);
4955 proxied_throws_idx.push_back(idx);
4956 }
4957 }
4958 const size_t num_virtual_methods = proxied_methods.size();
Alex Lightbc115092020-03-27 11:25:16 -07004959 // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
4960 // contains an array of all the classes each function is declared to throw.
4961 // This is used to wrap unexpected exceptions in a
4962 // UndeclaredThrowableException exception. This array is in the same order as
4963 // the methods array and like the methods array must be filtered to remove any
4964 // non-proxied methods.
Alex Light133987d2020-03-26 19:22:12 +00004965 const bool has_filtered_methods =
4966 static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
4967 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
4968 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
4969 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
4970 hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
4971 (has_filtered_methods)
4972 ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
4973 self, original_proxied_throws->GetClass(), num_virtual_methods)
4974 : original_proxied_throws.Get()));
Alex Lightbc115092020-03-27 11:25:16 -07004975 if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
4976 self->AssertPendingOOMException();
4977 return nullptr;
4978 }
Alex Light133987d2020-03-26 19:22:12 +00004979 if (has_filtered_methods) {
4980 for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
4981 DCHECK_LE(new_idx, orig_idx);
4982 proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
4983 }
4984 }
Alex Lighte64300b2015-12-15 15:02:47 -08004985
4986 // Create the methods array.
4987 LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
4988 self, allocator, num_direct_methods + num_virtual_methods);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004989 // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
4990 // want to throw OOM in the future.
Alex Lighte64300b2015-12-15 15:02:47 -08004991 if (UNLIKELY(proxy_class_methods == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004992 self->AssertPendingOOMException();
4993 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07004994 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004995 temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
Alex Lighte64300b2015-12-15 15:02:47 -08004996
4997 // Create the single direct method.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004998 CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
Alex Lighte64300b2015-12-15 15:02:47 -08004999
5000  // Create the virtual methods using the specified prototypes.
5001  // TODO: These should really use the iterators.
Jesse Wilson95caa792011-10-12 18:14:17 -04005002 for (size_t i = 0; i < num_virtual_methods; ++i) {
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005003 auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
Alex Light133987d2020-03-26 19:22:12 +00005004 auto* prototype = proxied_methods[i];
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005005 CreateProxyMethod(temp_klass, prototype, virtual_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005006 DCHECK(virtual_method->GetDeclaringClass() != nullptr);
5007 DCHECK(prototype->GetDeclaringClass() != nullptr);
Jesse Wilson95caa792011-10-12 18:14:17 -04005008 }
Ian Rogersc2b44472011-12-14 21:17:17 -08005009
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005010 // The super class is java.lang.reflect.Proxy
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005011 temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005012 // Now effectively in the loaded state.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005013 mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
Ian Rogers62d6c772013-02-27 08:32:07 -08005014 self->AssertNoPendingException();
Ian Rogersc2b44472011-12-14 21:17:17 -08005015
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005016 // At this point the class is loaded. Publish a ClassLoad event.
5017 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
5018 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
5019
5020 MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
Ian Rogersc8982582012-09-07 16:53:25 -07005021 {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005022 // Must hold lock on object when resolved.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005023 ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005024 // Link the fields and virtual methods, creating vtable and iftables.
5025 // The new class will replace the old one in the class table.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005026 Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
Mathieu Chartier0795f232016-09-27 18:43:30 -07005027 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
Vladimir Marko3892e622019-03-15 15:22:18 +00005028 if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
Vladimir Markoa4d28dd2021-06-30 11:28:06 +01005029 if (!temp_klass->IsErroneous()) {
5030 mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
5031 }
Mathieu Chartierc528dba2013-11-26 12:00:11 -08005032 return nullptr;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07005033 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005034 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005035 CHECK(temp_klass->IsRetired());
5036 CHECK_NE(temp_klass.Get(), klass.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005037
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005038 CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
Mathieu Chartier0795f232016-09-27 18:43:30 -07005039 interfaces_sfield.SetObject<false>(
5040 klass.Get(),
Mathieu Chartierf8ac97f2016-10-05 15:56:52 -07005041 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005042 CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
5043 throws_sfield.SetObject<false>(
Mathieu Chartier0795f232016-09-27 18:43:30 -07005044 klass.Get(),
Alex Light133987d2020-03-26 19:22:12 +00005045 proxied_throws.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005046
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005047 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
5048
Vladimir Marko305c38b2018-02-14 11:50:07 +00005049 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5050 // See also ClassLinker::EnsureInitialized().
5051 if (kBitstringSubtypeCheckEnabled) {
5052 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5053 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
5054 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
5055 }
5056
Vladimir Markobf121912019-06-04 13:49:05 +01005057 VisiblyInitializedCallback* callback = nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005058 {
5059 // Lock on klass is released. Lock new class object.
5060 ObjectLock<mirror::Class> initialization_lock(self, klass);
Andreas Gampe5b20b352018-10-11 19:03:20 -07005061 EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
Vladimir Markobf121912019-06-04 13:49:05 +01005062 // Conservatively go through the ClassStatus::kInitialized state.
5063 callback = MarkClassInitialized(self, klass);
5064 }
5065 if (callback != nullptr) {
5066 callback->MakeVisible(self);
Ian Rogersc8982582012-09-07 16:53:25 -07005067 }
Ian Rogersc2b44472011-12-14 21:17:17 -08005068
David Srbecky346fd962020-07-27 16:51:00 +01005069 // Consistency checks.
Elliott Hughes67d92002012-03-26 15:08:51 -07005070 if (kIsDebugBuild) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005071 CHECK(klass->GetIFieldsPtr() == nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005072 CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
5073
Ian Rogersc2b44472011-12-14 21:17:17 -08005074 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005075 auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
Alex Light133987d2020-03-26 19:22:12 +00005076 CheckProxyMethod(virtual_method, proxied_methods[i]);
Ian Rogersc2b44472011-12-14 21:17:17 -08005077 }
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005078
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005079 StackHandleScope<1> hs2(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07005080 Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005081 std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
Mathieu Chartier590fee92013-09-13 13:46:47 -07005082 decoded_name->ToModifiedUtf8().c_str()));
David Sehr709b0702016-10-13 09:12:37 -07005083 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005084
5085 std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
Mathieu Chartier590fee92013-09-13 13:46:47 -07005086 decoded_name->ToModifiedUtf8().c_str()));
David Sehr709b0702016-10-13 09:12:37 -07005087 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
Ian Rogersc2b44472011-12-14 21:17:17 -08005088
Narayan Kamath6b2dc312017-03-14 13:26:12 +00005089 CHECK_EQ(klass.Get()->GetProxyInterfaces(),
Mathieu Chartierf8ac97f2016-10-05 15:56:52 -07005090 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
Narayan Kamath6b2dc312017-03-14 13:26:12 +00005091 CHECK_EQ(klass.Get()->GetProxyThrows(),
Alex Light133987d2020-03-26 19:22:12 +00005092 proxied_throws.Get());
Ian Rogersc2b44472011-12-14 21:17:17 -08005093 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005094 return klass.Get();
Jesse Wilson95caa792011-10-12 18:14:17 -04005095}
5096
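// Shape of the class produced above (informal summary): a final subclass of
// java.lang.reflect.Proxy implementing the requested interfaces, with two static
// fields ('interfaces' at index 0 and 'throws' at index 1), a single direct method
// (the <init>(InvocationHandler) constructor cloned from Proxy) and one final
// virtual method per proxied interface method, all dispatching through the
// invocation handler via GetQuickProxyInvokeHandler().
//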
Mathieu Chartiere401d142015-04-22 13:56:20 -07005097void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5098 // Create constructor for Proxy that must initialize the method.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005099 ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5100 CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
Przemyslaw Szczepaniakf11cd292016-08-17 17:46:38 +01005101
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005102 // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5103 // on which front-end compiler was used to build the libcore DEX files.
Alex Light6cae5ea2018-06-07 17:07:02 -07005104 ArtMethod* proxy_constructor =
5105 jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init);
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005106 DCHECK(proxy_constructor != nullptr)
5107 << "Could not find <init> method in java.lang.reflect.Proxy";
5108
Jeff Haodb8a6642014-08-14 17:18:52 -07005109 // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5110 // code_ too)
Mathieu Chartiere401d142015-04-22 13:56:20 -07005111 DCHECK(out != nullptr);
5112 out->CopyFrom(proxy_constructor, image_pointer_size_);
Vladimir Markoba118822017-06-12 15:41:56 +01005113 // Make this constructor public and fix the class to be our Proxy version.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005114 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
Vladimir Markoba118822017-06-12 15:41:56 +01005115 // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005116 out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5117 kAccPublic |
5118 kAccCompileDontBother);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005119 out->SetDeclaringClass(klass.Get());
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005120
5121 // Set the original constructor method.
5122 out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
Ian Rogersc2b44472011-12-14 21:17:17 -08005123}
5124
Mathieu Chartiere401d142015-04-22 13:56:20 -07005125void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
Ian Rogers466bb252011-10-14 03:29:56 -07005126 CHECK(constructor->IsConstructor());
Mathieu Chartiere401d142015-04-22 13:56:20 -07005127 auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5128 CHECK_STREQ(np->GetName(), "<init>");
5129 CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
Ian Rogers466bb252011-10-14 03:29:56 -07005130 DCHECK(constructor->IsPublic());
Jesse Wilson95caa792011-10-12 18:14:17 -04005131}
5132
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005133void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005134 ArtMethod* out) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08005135 // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
Ian Rogers466bb252011-10-14 03:29:56 -07005136 // as necessary
Mathieu Chartiere401d142015-04-22 13:56:20 -07005137 DCHECK(out != nullptr);
5138 out->CopyFrom(prototype, image_pointer_size_);
Ian Rogers466bb252011-10-14 03:29:56 -07005139
Alex Lighte9dd04f2016-03-16 16:09:45 -07005140 // Set class to be the concrete proxy class.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005141 out->SetDeclaringClass(klass.Get());
Vladimir Markode0d0de2021-03-18 14:12:35 +00005142 // Clear the abstract and default flags to ensure that defaults aren't picked in
Alex Lighte9dd04f2016-03-16 16:09:45 -07005143 // preference to the invocation handler.
Vladimir Markode0d0de2021-03-18 14:12:35 +00005144 const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
Alex Lighte9dd04f2016-03-16 16:09:45 -07005145 // Make the method final.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005146 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5147 const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
Alex Lighte9dd04f2016-03-16 16:09:45 -07005148 out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5149
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005150 // Set the original interface method.
5151 out->SetDataPtrSize(prototype, image_pointer_size_);
5152
Ian Rogers466bb252011-10-14 03:29:56 -07005153  // At runtime the method looks like a reference and argument saving method; clone the code-
5154  // related parameters from this method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005155 out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
Ian Rogersc2b44472011-12-14 21:17:17 -08005156}
Jesse Wilson95caa792011-10-12 18:14:17 -04005157
Mathieu Chartiere401d142015-04-22 13:56:20 -07005158void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
David Srbecky346fd962020-07-27 16:51:00 +01005159 // Basic consistency checks.
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08005160 CHECK(!prototype->IsFinal());
5161 CHECK(method->IsFinal());
Alex Light9139e002015-10-09 15:59:48 -07005162 CHECK(method->IsInvokable());
Ian Rogers19846512012-02-24 11:42:47 -08005163
5164 // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5165  // interface prototype. The exceptions to this are constructors and the Class of the Proxy itself.
Ian Rogers19846512012-02-24 11:42:47 -08005166 CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
Vladimir Marko5c3e9d12017-08-30 16:43:54 +01005167 CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
Jesse Wilson95caa792011-10-12 18:14:17 -04005168}
5169
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005170bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass, bool can_init_statics,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005171 bool can_init_parents) {
Brian Carlstrom610e49f2013-11-04 17:07:22 -08005172 if (can_init_statics && can_init_parents) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005173 return true;
5174 }
5175 if (!can_init_statics) {
5176 // Check if there's a class initializer.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005177 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005178 if (clinit != nullptr) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005179 return false;
5180 }
5181 // Check if there are encoded static values needing initialization.
5182 if (klass->NumStaticFields() != 0) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005183 const dex::ClassDef* dex_class_def = klass->GetClassDef();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005184 DCHECK(dex_class_def != nullptr);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005185 if (dex_class_def->static_values_off_ != 0) {
5186 return false;
5187 }
5188 }
Vladimir Marko889b72d2019-11-12 11:01:13 +00005189 }
5190 // If we are a class we need to initialize all interfaces with default methods when we are
5191 // initialized. Check all of them.
5192 if (!klass->IsInterface()) {
5193 size_t num_interfaces = klass->GetIfTableCount();
5194 for (size_t i = 0; i < num_interfaces; i++) {
5195 ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5196 if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5197 if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07005198 return false;
5199 }
5200 }
5201 }
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005202 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07005203 if (klass->IsInterface() || !klass->HasSuperClass()) {
5204 return true;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005205 }
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005206 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Vladimir Marko889b72d2019-11-12 11:01:13 +00005207 if (super_class->IsInitialized()) {
5208 return true;
Mathieu Chartiere401d142015-04-22 13:56:20 -07005209 }
Vladimir Marko889b72d2019-11-12 11:01:13 +00005210 return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005211}
5212
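// Usage sketch (hypothetical caller; `self` and `h_klass` are assumed locals): the
// AOT compiler attempts initialization without being allowed to run <clinit> code.
// InitializeClass() performs the CanWeInitializeClass() fast-fail first and simply
// returns false, without throwing, when static or parent initialization would be
// required:
//
//   bool initialized = InitializeClass(self, h_klass,
//                                      /*can_init_statics=*/ false,
//                                      /*can_init_parents=*/ false);
//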
Mathieu Chartier23369542020-03-04 08:24:11 -08005213bool ClassLinker::InitializeClass(Thread* self,
5214 Handle<mirror::Class> klass,
5215 bool can_init_statics,
5216 bool can_init_parents) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005217 // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
5218
5219 // Are we already initialized and therefore done?
5220  // Note: we differ from the JLS here as we don't do this under the lock; this is benign as
5221 // an initialized class will never change its state.
5222 if (klass->IsInitialized()) {
5223 return true;
5224 }
5225
5226 // Fast fail if initialization requires a full runtime. Not part of the JLS.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005227 if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005228 return false;
5229 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005230
Ian Rogers7b078e82014-09-10 14:44:24 -07005231 self->AllowThreadSuspension();
Mathieu Chartier23369542020-03-04 08:24:11 -08005232 Runtime* const runtime = Runtime::Current();
5233 const bool stats_enabled = runtime->HasStatsEnabled();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005234 uint64_t t0;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005235 {
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005236 ObjectLock<mirror::Class> lock(self, klass);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005237
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005238 // Re-check under the lock in case another thread initialized ahead of us.
5239 if (klass->IsInitialized()) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005240 return true;
5241 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005242
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005243 // Was the class already found to be erroneous? Done under the lock to match the JLS.
Brian Carlstromd1422f82011-09-28 11:37:09 -07005244 if (klass->IsErroneous()) {
Andreas Gampe7b3063b2019-01-07 14:12:52 -08005245 ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
Brian Carlstromb23eab12014-10-08 17:55:21 -07005246 VlogClassInitializationFailure(klass);
Brian Carlstromd1422f82011-09-28 11:37:09 -07005247 return false;
5248 }
5249
Vladimir Marko72ab6842017-01-20 19:32:50 +00005250 CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5251 << klass->PrettyClass() << ": state=" << klass->GetStatus();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005252
5253 if (!klass->IsVerified()) {
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00005254 VerifyClass(self, /*verifier_deps= */ nullptr, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005255 if (!klass->IsVerified()) {
5256      // We failed to verify; expect either the klass to be erroneous or verification to have failed at
5257 // compile time.
5258 if (klass->IsErroneous()) {
Andreas Gampefc49fa02016-04-21 12:21:55 -07005259 // The class is erroneous. This may be a verifier error, or another thread attempted
5260 // verification and/or initialization and failed. We can distinguish those cases by
5261 // whether an exception is already pending.
5262 if (self->IsExceptionPending()) {
5263 // Check that it's a VerifyError.
Nicolas Geoffray4dc65892021-07-05 17:43:35 +01005264 DCHECK(IsVerifyError(self->GetException()));
Andreas Gampefc49fa02016-04-21 12:21:55 -07005265 } else {
5266 // Check that another thread attempted initialization.
5267 DCHECK_NE(0, klass->GetClinitThreadId());
5268 DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5269 // Need to rethrow the previous failure now.
5270 ThrowEarlierClassFailure(klass.Get(), true);
5271 }
Brian Carlstromb23eab12014-10-08 17:55:21 -07005272 VlogClassInitializationFailure(klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005273 } else {
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08005274 CHECK(Runtime::Current()->IsAotCompiler());
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01005275 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
Vladimir Markod79b37b2018-11-02 13:06:22 +00005276 self->AssertNoPendingException();
5277 self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
jeffhaoa9b3bf42012-06-06 17:18:39 -07005278 }
Vladimir Markod79b37b2018-11-02 13:06:22 +00005279 self->AssertPendingException();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005280 return false;
Mathieu Chartier524507a2014-08-27 15:28:28 -07005281 } else {
5282 self->AssertNoPendingException();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005283 }
Andreas Gampefc49fa02016-04-21 12:21:55 -07005284
5285 // A separate thread could have moved us all the way to initialized. A "simple" example
5286 // involves a subclass of the current class being initialized at the same time (which
5287 // will implicitly initialize the superclass, if scheduled that way). b/28254258
Vladimir Marko72ab6842017-01-20 19:32:50 +00005288 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
Andreas Gampefc49fa02016-04-21 12:21:55 -07005289 if (klass->IsInitialized()) {
5290 return true;
5291 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005292 }
5293
Vladimir Marko2c64a832018-01-04 11:31:56 +00005294 // If the class is ClassStatus::kInitializing, either this thread is
Brian Carlstromd1422f82011-09-28 11:37:09 -07005295    // initializing higher up the stack or another thread has beaten us
5296 // to initializing and we need to wait. Either way, this
5297 // invocation of InitializeClass will not be responsible for
5298 // running <clinit> and will return.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005299 if (klass->GetStatus() == ClassStatus::kInitializing) {
Mathieu Chartier524507a2014-08-27 15:28:28 -07005300 // Could have got an exception during verification.
5301 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005302 VlogClassInitializationFailure(klass);
Mathieu Chartier524507a2014-08-27 15:28:28 -07005303 return false;
5304 }
Elliott Hughes005ab2e2011-09-11 17:15:31 -07005305 // We caught somebody else in the act; was it us?
Elliott Hughesdcc24742011-09-07 14:02:44 -07005306 if (klass->GetClinitThreadId() == self->GetTid()) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005307 // Yes. That's fine. Return so we can continue initializing.
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005308 return true;
5309 }
Brian Carlstromd1422f82011-09-28 11:37:09 -07005310 // No. That's fine. Wait for another thread to finish initializing.
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005311 return WaitForInitializeClass(klass, self, lock);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005312 }
5313
Jeff Haoe2e40342017-07-19 10:45:18 -07005314 // Try to get the oat class's status for this class if the oat file is present. The compiler
5315 // tries to validate superclass descriptors, and writes the result into the oat file.
5316 // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5317 // is different at runtime than it was at compile time, the oat file is rejected. So if the
5318 // oat file is present, the classpaths must match, and the runtime check can be skipped.
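  // Illustrative sketch (hypothetical classes) of what ValidateSuperClassDescriptors
  // guards against when that check cannot be skipped:
  //
  //   public class Base { public Foo get() { return null; } }      // loaded by loader L1
  //   public class Derived extends Base {                           // loaded by loader L2
  //     @Override public Foo get() { return null; }
  //   }
  //
  // If the descriptor "LFoo;" resolves to different classes in L1 and L2, the override is
  // not type-safe, so linking fails with a LinkageError instead of succeeding silently.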
Jeff Hao0cb17282017-07-12 14:51:49 -07005319 bool has_oat_class = false;
Jeff Haoe2e40342017-07-19 10:45:18 -07005320 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5321 ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5322 : OatFile::OatClass::Invalid();
Vladimir Marko2c64a832018-01-04 11:31:56 +00005323 if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
Jeff Hao0cb17282017-07-12 14:51:49 -07005324 !ValidateSuperClassDescriptors(klass)) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00005325 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005326 return false;
5327 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005328 self->AllowThreadSuspension();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005329
Vladimir Marko2c64a832018-01-04 11:31:56 +00005330 CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
Andreas Gampe9510ccd2016-04-20 09:55:25 -07005331 << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005332
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005333 // From here out other threads may observe that we're initializing and so changes of state
5334 // require a notification.
Elliott Hughesdcc24742011-09-07 14:02:44 -07005335 klass->SetClinitThreadId(self->GetTid());
Vladimir Marko2c64a832018-01-04 11:31:56 +00005336 mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005337
Mathieu Chartier23369542020-03-04 08:24:11 -08005338 t0 = stats_enabled ? NanoTime() : 0u;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005339 }
5340
Andreas Gampeaf864702019-07-23 14:05:35 -07005341 uint64_t t_sub = 0;
5342
Brian Carlstrom6d3f72c2013-08-21 18:06:34 -07005343 // Initialize super classes; this must be done while we are initializing, per the JLS.
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005344 if (!klass->IsInterface() && klass->HasSuperClass()) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005345 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005346 if (!super_class->IsInitialized()) {
5347 CHECK(!super_class->IsInterface());
5348 CHECK(can_init_parents);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005349 StackHandleScope<1> hs(self);
5350 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
Mathieu Chartier23369542020-03-04 08:24:11 -08005351 uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
Ian Rogers7b078e82014-09-10 14:44:24 -07005352 bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
Mathieu Chartier23369542020-03-04 08:24:11 -08005353 uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005354 if (!super_initialized) {
5355 // The super class was verified before entering the initializing state, so we should only be here
5356 // if the super class became erroneous due to initialization.
Chang Xingadbb91c2017-07-17 11:23:55 -07005357 // In the AOT compiler case, the super class might also be initializing, but we don't
5358 // want to process circular dependencies in pre-compile.
5359 CHECK(self->IsExceptionPending())
Brian Carlstromf3632832014-05-20 15:36:53 -07005360 << "Super class initialization failed for "
David Sehr709b0702016-10-13 09:12:37 -07005361 << handle_scope_super->PrettyDescriptor()
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005362 << " that has unexpected status " << handle_scope_super->GetStatus()
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005363 << "\nPending exception:\n"
Nicolas Geoffray14691c52015-03-05 10:40:17 +00005364 << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005365 ObjectLock<mirror::Class> lock(self, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005366 // Initialization failed because the super-class is erroneous.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005367 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005368 return false;
5369 }
Andreas Gampeaf864702019-07-23 14:05:35 -07005370 t_sub = super_t1 - super_t0;
Ian Rogers1bddec32012-02-04 12:27:34 -08005371 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005372 }
5373
Alex Lighteb7c1442015-08-31 13:17:42 -07005374 if (!klass->IsInterface()) {
5375 // Initialize interfaces with default methods for the JLS.
5376 size_t num_direct_interfaces = klass->NumDirectInterfaces();
Alex Light56a40f52015-10-14 11:07:41 -07005377 // Only set up the (expensive) handle scope if we actually need to.
5378 if (UNLIKELY(num_direct_interfaces > 0)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07005379 StackHandleScope<1> hs_iface(self);
Alex Light56a40f52015-10-14 11:07:41 -07005380 MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5381 for (size_t i = 0; i < num_direct_interfaces; i++) {
Vladimir Markob10668c2021-06-10 09:52:53 +01005382 handle_scope_iface.Assign(klass->GetDirectInterface(i));
Vladimir Marko8d6768d2017-03-14 10:13:21 +00005383 CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
Alex Light56a40f52015-10-14 11:07:41 -07005384 CHECK(handle_scope_iface->IsInterface());
5385 if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5386 // We have already done this for this interface. Skip it.
5387 continue;
5388 }
5389 // We cannot just call InitializeClass directly because we need to ensure that ALL
5390 // interfaces with default methods are initialized. Non-default interface initialization
5391 // will not affect other non-default super-interfaces.
Mathieu Chartier23369542020-03-04 08:24:11 -08005392 // This timing is not very precise; it misses the time spent walking the interface tree.
5393 uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
Alex Light56a40f52015-10-14 11:07:41 -07005394 bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
5395 handle_scope_iface,
5396 can_init_statics,
5397 can_init_parents);
Mathieu Chartier23369542020-03-04 08:24:11 -08005398 uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
Alex Light56a40f52015-10-14 11:07:41 -07005399 if (!iface_initialized) {
5400 ObjectLock<mirror::Class> lock(self, klass);
5401 // Initialization failed because one of our interfaces with default methods is erroneous.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005402 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Alex Light56a40f52015-10-14 11:07:41 -07005403 return false;
5404 }
Andreas Gampeaf864702019-07-23 14:05:35 -07005405 t_sub += inf_t1 - inf_t0;
Alex Lighteb7c1442015-08-31 13:17:42 -07005406 }
5407 }
5408 }
5409
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005410 const size_t num_static_fields = klass->NumStaticFields();
5411 if (num_static_fields > 0) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005412 const dex::ClassDef* dex_class_def = klass->GetClassDef();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005413 CHECK(dex_class_def != nullptr);
Hiroshi Yamauchi67ef46a2014-08-21 15:59:43 -07005414 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005415 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
Mathieu Chartierf8322842014-05-16 10:59:25 -07005416 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005417
5418 // Eagerly fill in static fields so that we don't have to do as many expensive
5419 // Class::FindStaticField calls in ResolveField.
5420 for (size_t i = 0; i < num_static_fields; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07005421 ArtField* field = klass->GetStaticField(i);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005422 const uint32_t field_idx = field->GetDexFieldIndex();
David Srbecky5de5efe2021-02-15 21:23:00 +00005423 ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005424 if (resolved_field == nullptr) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01005425 // Populating the cache of a dex file which defines `klass` should always be allowed.
David Brazdilf50ac102018-10-17 18:00:06 +01005426 DCHECK(!hiddenapi::ShouldDenyAccessToMember(
5427 field,
5428 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
5429 hiddenapi::AccessMethod::kNone));
David Srbecky5de5efe2021-02-15 21:23:00 +00005430 dex_cache->SetResolvedField(field_idx, field);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07005431 } else {
5432 DCHECK_EQ(field, resolved_field);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005433 }
5434 }
5435
Vladimir Markoe11dd502017-12-08 14:09:45 +00005436 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
5437 class_loader,
David Sehr9323e6e2016-09-13 08:58:35 -07005438 this,
5439 *dex_class_def);
Vladimir Markoe11dd502017-12-08 14:09:45 +00005440 const DexFile& dex_file = *dex_cache->GetDexFile();
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005441
Hiroshi Yamauchi88500112014-08-22 12:12:56 -07005442 if (value_it.HasNext()) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005443 ClassAccessor accessor(dex_file, *dex_class_def);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005444 CHECK(can_init_statics);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005445 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
5446 if (!value_it.HasNext()) {
5447 break;
5448 }
5449 ArtField* art_field = ResolveField(field.GetIndex(),
5450 dex_cache,
5451 class_loader,
Andreas Gampe98ea9d92018-10-19 14:06:15 -07005452 /* is_static= */ true);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005453 if (Runtime::Current()->IsActiveTransaction()) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005454 value_it.ReadValueToField<true>(art_field);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005455 } else {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005456 value_it.ReadValueToField<false>(art_field);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005457 }
Mathieu Chartierda595be2016-08-10 13:57:39 -07005458 if (self->IsExceptionPending()) {
5459 break;
5460 }
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005461 value_it.Next();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005462 }
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005463 DCHECK(self->IsExceptionPending() || !value_it.HasNext());
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005464 }
5465 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005466
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005467
Mathieu Chartierda595be2016-08-10 13:57:39 -07005468 if (!self->IsExceptionPending()) {
5469 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5470 if (clinit != nullptr) {
5471 CHECK(can_init_statics);
5472 JValue result;
5473 clinit->Invoke(self, nullptr, 0, &result, "V");
5474 }
5475 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005476 self->AllowThreadSuspension();
Mathieu Chartier23369542020-03-04 08:24:11 -08005477 uint64_t t1 = stats_enabled ? NanoTime() : 0u;
Elliott Hughes83df2ac2011-10-11 16:37:54 -07005478
Vladimir Markobf121912019-06-04 13:49:05 +01005479 VisiblyInitializedCallback* callback = nullptr;
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005480 bool success = true;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005481 {
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005482 ObjectLock<mirror::Class> lock(self, klass);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005483
5484 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005485 WrapExceptionInInitializer(klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005486 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005487 success = false;
Sebastien Hertz1c80bec2015-02-03 11:58:06 +01005488 } else if (Runtime::Current()->IsTransactionAborted()) {
5489 // The exception thrown when the transaction aborted has been caught and cleared
5490 // so we need to throw it again now.
David Sehr709b0702016-10-13 09:12:37 -07005491 VLOG(compiler) << "Return from class initializer of "
5492 << mirror::Class::PrettyDescriptor(klass.Get())
Sebastien Hertzbd9cf9f2015-03-03 12:16:13 +01005493 << " without exception while transaction was aborted: re-throw it now.";
Mathieu Chartier23369542020-03-04 08:24:11 -08005494 runtime->ThrowTransactionAbortError(self);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005495 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Sebastien Hertz1c80bec2015-02-03 11:58:06 +01005496 success = false;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005497 } else {
Mathieu Chartier23369542020-03-04 08:24:11 -08005498 if (stats_enabled) {
5499 RuntimeStats* global_stats = runtime->GetStats();
5500 RuntimeStats* thread_stats = self->GetStats();
5501 ++global_stats->class_init_count;
5502 ++thread_stats->class_init_count;
5503 global_stats->class_init_time_ns += (t1 - t0 - t_sub);
5504 thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
5505 }
Ian Rogerse6bb3b22013-08-19 21:51:45 -07005506 // Set the class as initialized unless we failed to initialize its static fields.
Vladimir Markobf121912019-06-04 13:49:05 +01005507 callback = MarkClassInitialized(self, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005508 if (VLOG_IS_ON(class_linker)) {
Ian Rogers1ff3c982014-08-12 02:30:58 -07005509 std::string temp;
5510 LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
Mathieu Chartierf8322842014-05-16 10:59:25 -07005511 klass->GetLocation();
Brian Carlstromae826982011-11-09 01:33:42 -08005512 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005513 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005514 }
Vladimir Markobf121912019-06-04 13:49:05 +01005515 if (callback != nullptr) {
5516 callback->MakeVisible(self);
5517 }
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005518 return success;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005519}
5520
Alex Lighteb7c1442015-08-31 13:17:42 -07005521// We recursively run down the tree of interfaces. We need to do this in the order they are declared
5522// and perform the initialization only on those interfaces that contain default methods.
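// Illustrative sketch with hypothetical interfaces showing why the recursion is needed:
//
//   interface A { default int f() { return 1; } }  // declares a default method
//   interface B extends A { int g(); }             // declares no default methods itself
//   class C implements B { public int g() { return 2; } }
//
// Initializing C must initialize A (it contributes a default method) even though C's only
// direct interface, B, declares no defaults and is therefore not initialized here.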
5523bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5524 Handle<mirror::Class> iface,
5525 bool can_init_statics,
5526 bool can_init_parents) {
5527 CHECK(iface->IsInterface());
5528 size_t num_direct_ifaces = iface->NumDirectInterfaces();
Alex Light56a40f52015-10-14 11:07:41 -07005529 // Only create the (expensive) handle scope if we need it.
5530 if (UNLIKELY(num_direct_ifaces > 0)) {
5531 StackHandleScope<1> hs(self);
5532 MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5533 // First we initialize all of iface's super-interfaces recursively.
5534 for (size_t i = 0; i < num_direct_ifaces; i++) {
Vladimir Markob10668c2021-06-10 09:52:53 +01005535 ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00005536 CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
Alex Light56a40f52015-10-14 11:07:41 -07005537 if (!super_iface->HasBeenRecursivelyInitialized()) {
5538 // Recursive step
5539 handle_super_iface.Assign(super_iface);
5540 if (!InitializeDefaultInterfaceRecursive(self,
5541 handle_super_iface,
5542 can_init_statics,
5543 can_init_parents)) {
5544 return false;
5545 }
Alex Lighteb7c1442015-08-31 13:17:42 -07005546 }
5547 }
5548 }
5549
5550 bool result = true;
5551 // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5552 // initialize it if it has no default methods.
5553 if (iface->HasDefaultMethods()) {
5554 result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5555 }
5556
5557 // Mark that this interface has undergone recursive default interface initialization so we know we
5558 // can skip it on any later class initializations. We do this even if we are not a default
5559 // interface since we can still avoid the traversal. This is purely a performance optimization.
5560 if (result) {
5561 // TODO This should be done in a better way
Andreas Gampe976b2982018-03-02 17:54:22 -08005562 // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5563 // interface. It is bad (Java) style, but not impossible. Marking the recursive
5564 // initialization is a performance optimization (to avoid another idempotent visit
5565 // for other implementing classes/interfaces), and can be revisited later.
5566 ObjectTryLock<mirror::Class> lock(self, iface);
5567 if (lock.Acquired()) {
5568 iface->SetRecursivelyInitialized();
5569 }
Alex Lighteb7c1442015-08-31 13:17:42 -07005570 }
5571 return result;
5572}
5573
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07005574bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
5575 Thread* self,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005576 ObjectLock<mirror::Class>& lock)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005577 REQUIRES_SHARED(Locks::mutator_lock_) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005578 while (true) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -07005579 self->AssertNoPendingException();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005580 CHECK(!klass->IsInitialized());
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005581 lock.WaitIgnoringInterrupts();
Brian Carlstromd1422f82011-09-28 11:37:09 -07005582
5583 // When we wake up, repeat the test for init-in-progress. If
5584 // there's an exception pending (only possible if
Brian Carlstromb23eab12014-10-08 17:55:21 -07005585 // we were not using WaitIgnoringInterrupts), bail out.
Brian Carlstromd1422f82011-09-28 11:37:09 -07005586 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005587 WrapExceptionInInitializer(klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005588 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Brian Carlstromd1422f82011-09-28 11:37:09 -07005589 return false;
5590 }
5591 // Spurious wakeup? Go back to waiting.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005592 if (klass->GetStatus() == ClassStatus::kInitializing) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005593 continue;
5594 }
Vladimir Marko2c64a832018-01-04 11:31:56 +00005595 if (klass->GetStatus() == ClassStatus::kVerified &&
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08005596 Runtime::Current()->IsAotCompiler()) {
Ian Rogers3d1548d2012-09-24 14:08:03 -07005597 // Compile time initialization failed.
5598 return false;
5599 }
Brian Carlstromd1422f82011-09-28 11:37:09 -07005600 if (klass->IsErroneous()) {
5601 // The caller wants an exception, but it was thrown in a
5602 // different thread. Synthesize one here.
Brian Carlstromdf143242011-10-10 18:05:34 -07005603 ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
David Sehr709b0702016-10-13 09:12:37 -07005604 klass->PrettyDescriptor().c_str());
Brian Carlstromb23eab12014-10-08 17:55:21 -07005605 VlogClassInitializationFailure(klass);
Brian Carlstromd1422f82011-09-28 11:37:09 -07005606 return false;
5607 }
5608 if (klass->IsInitialized()) {
5609 return true;
5610 }
David Sehr709b0702016-10-13 09:12:37 -07005611 LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
Mathieu Chartierc528dba2013-11-26 12:00:11 -08005612 << klass->GetStatus();
Brian Carlstromd1422f82011-09-28 11:37:09 -07005613 }
Ian Rogers07140832014-09-30 15:43:59 -07005614 UNREACHABLE();
Brian Carlstromd1422f82011-09-28 11:37:09 -07005615}
5616
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005617static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5618 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005619 ArtMethod* method,
5620 ArtMethod* m)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005621 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005622 DCHECK(Thread::Current()->IsExceptionPending());
5623 DCHECK(!m->IsProxyMethod());
5624 const DexFile* dex_file = m->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005625 const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5626 const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
Andreas Gampea5b09a62016-11-17 15:21:22 -08005627 dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
David Sehr709b0702016-10-13 09:12:37 -07005628 std::string return_type = dex_file->PrettyType(return_type_idx);
5629 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005630 ThrowWrappedLinkageError(klass.Get(),
5631 "While checking class %s method %s signature against %s %s: "
5632 "Failed to resolve return type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005633 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5634 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005635 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005636 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005637 return_type.c_str(), class_loader.c_str());
5638}
5639
5640static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5641 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005642 ArtMethod* method,
5643 ArtMethod* m,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07005644 uint32_t index,
Andreas Gampea5b09a62016-11-17 15:21:22 -08005645 dex::TypeIndex arg_type_idx)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005646 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005647 DCHECK(Thread::Current()->IsExceptionPending());
5648 DCHECK(!m->IsProxyMethod());
5649 const DexFile* dex_file = m->GetDexFile();
David Sehr709b0702016-10-13 09:12:37 -07005650 std::string arg_type = dex_file->PrettyType(arg_type_idx);
5651 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005652 ThrowWrappedLinkageError(klass.Get(),
5653 "While checking class %s method %s signature against %s %s: "
5654 "Failed to resolve arg %u type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005655 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5656 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005657 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005658 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005659 index, arg_type.c_str(), class_loader.c_str());
5660}
5661
5662static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5663 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005664 ArtMethod* method,
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005665 const std::string& error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005666 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005667 ThrowLinkageError(klass.Get(),
5668 "Class %s method %s resolves differently in %s %s: %s",
David Sehr709b0702016-10-13 09:12:37 -07005669 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5670 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005671 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005672 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005673 error_msg.c_str());
5674}
5675
Ian Rogersb5fb2072014-12-02 17:22:02 -08005676static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005677 Handle<mirror::Class> klass,
5678 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005679 ArtMethod* method1,
5680 ArtMethod* method2)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005681 REQUIRES_SHARED(Locks::mutator_lock_) {
Ian Rogersb5fb2072014-12-02 17:22:02 -08005682 {
5683 StackHandleScope<1> hs(self);
Vladimir Markob45528c2017-07-27 14:14:28 +01005684 Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08005685 if (UNLIKELY(return_type == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005686 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005687 return false;
5688 }
Vladimir Markob45528c2017-07-27 14:14:28 +01005689 ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005690 if (UNLIKELY(other_return_type == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005691 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005692 return false;
5693 }
Vladimir Marko862f43c2015-02-10 18:22:57 +00005694 if (UNLIKELY(other_return_type != return_type.Get())) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005695 ThrowSignatureMismatch(klass, super_klass, method1,
5696 StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
David Sehr709b0702016-10-13 09:12:37 -07005697 return_type->PrettyClassAndClassLoader().c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005698 return_type.Get(),
David Sehr709b0702016-10-13 09:12:37 -07005699 other_return_type->PrettyClassAndClassLoader().c_str(),
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005700 other_return_type.Ptr()));
Ian Rogersb5fb2072014-12-02 17:22:02 -08005701 return false;
5702 }
5703 }
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005704 const dex::TypeList* types1 = method1->GetParameterTypeList();
5705 const dex::TypeList* types2 = method2->GetParameterTypeList();
Ian Rogersb5fb2072014-12-02 17:22:02 -08005706 if (types1 == nullptr) {
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005707 if (types2 != nullptr && types2->Size() != 0) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005708 ThrowSignatureMismatch(klass, super_klass, method1,
5709 StringPrintf("Type list mismatch with %s",
David Sehr709b0702016-10-13 09:12:37 -07005710 method2->PrettyMethod(true).c_str()));
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005711 return false;
5712 }
5713 return true;
Ian Rogersb5fb2072014-12-02 17:22:02 -08005714 } else if (UNLIKELY(types2 == nullptr)) {
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005715 if (types1->Size() != 0) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005716 ThrowSignatureMismatch(klass, super_klass, method1,
5717 StringPrintf("Type list mismatch with %s",
David Sehr709b0702016-10-13 09:12:37 -07005718 method2->PrettyMethod(true).c_str()));
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005719 return false;
5720 }
5721 return true;
Ian Rogersb5fb2072014-12-02 17:22:02 -08005722 }
5723 uint32_t num_types = types1->Size();
5724 if (UNLIKELY(num_types != types2->Size())) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005725 ThrowSignatureMismatch(klass, super_klass, method1,
5726 StringPrintf("Type list mismatch with %s",
David Sehr709b0702016-10-13 09:12:37 -07005727 method2->PrettyMethod(true).c_str()));
Ian Rogersb5fb2072014-12-02 17:22:02 -08005728 return false;
5729 }
5730 for (uint32_t i = 0; i < num_types; ++i) {
Vladimir Marko862f43c2015-02-10 18:22:57 +00005731 StackHandleScope<1> hs(self);
Andreas Gampea5b09a62016-11-17 15:21:22 -08005732 dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
Vladimir Marko862f43c2015-02-10 18:22:57 +00005733 Handle<mirror::Class> param_type(hs.NewHandle(
Vladimir Markob45528c2017-07-27 14:14:28 +01005734 method1->ResolveClassFromTypeIndex(param_type_idx)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08005735 if (UNLIKELY(param_type == nullptr)) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005736 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005737 method1, i, param_type_idx);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005738 return false;
5739 }
Andreas Gampea5b09a62016-11-17 15:21:22 -08005740 dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005741 ObjPtr<mirror::Class> other_param_type =
Vladimir Markob45528c2017-07-27 14:14:28 +01005742 method2->ResolveClassFromTypeIndex(other_param_type_idx);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005743 if (UNLIKELY(other_param_type == nullptr)) {
5744 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005745 method2, i, other_param_type_idx);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005746 return false;
5747 }
Vladimir Marko862f43c2015-02-10 18:22:57 +00005748 if (UNLIKELY(param_type.Get() != other_param_type)) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005749 ThrowSignatureMismatch(klass, super_klass, method1,
5750 StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
5751 i,
David Sehr709b0702016-10-13 09:12:37 -07005752 param_type->PrettyClassAndClassLoader().c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005753 param_type.Get(),
David Sehr709b0702016-10-13 09:12:37 -07005754 other_param_type->PrettyClassAndClassLoader().c_str(),
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005755 other_param_type.Ptr()));
Ian Rogersb5fb2072014-12-02 17:22:02 -08005756 return false;
5757 }
5758 }
5759 return true;
5760}
5761
5762
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07005763bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005764 if (klass->IsInterface()) {
5765 return true;
5766 }
Ian Rogers151f2212014-05-06 11:27:27 -07005767 // Begin with the methods local to the superclass.
Ian Rogersded66a02014-10-28 18:12:55 -07005768 Thread* self = Thread::Current();
Mathieu Chartiere401d142015-04-22 13:56:20 -07005769 StackHandleScope<1> hs(self);
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005770 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005771 if (klass->HasSuperClass() &&
5772 klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005773 super_klass.Assign(klass->GetSuperClass());
Mingyao Yang2cdbad72014-07-16 10:44:41 -07005774 for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005775 auto* m = klass->GetVTableEntry(i, image_pointer_size_);
5776 auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
5777 if (m != super_m) {
Vladimir Marko942fd312017-01-16 20:52:19 +00005778 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5779 klass,
5780 super_klass,
5781 m,
5782 super_m))) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005783 self->AssertPendingException();
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005784 return false;
5785 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005786 }
5787 }
5788 }
Brian Carlstrom4b620ff2011-09-11 01:11:01 -07005789 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005790 super_klass.Assign(klass->GetIfTable()->GetInterface(i));
5791 if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
5792 uint32_t num_methods = super_klass->NumVirtualMethods();
Ian Rogers151f2212014-05-06 11:27:27 -07005793 for (uint32_t j = 0; j < num_methods; ++j) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005794 auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
5795 j, image_pointer_size_);
5796 auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
5797 if (m != super_m) {
Vladimir Marko942fd312017-01-16 20:52:19 +00005798 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5799 klass,
5800 super_klass,
5801 m,
5802 super_m))) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005803 self->AssertPendingException();
Andreas Gamped8ca52e2015-02-13 15:23:18 -08005804 return false;
5805 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005806 }
5807 }
5808 }
5809 }
5810 return true;
5811}
5812
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005813bool ClassLinker::EnsureInitialized(Thread* self,
5814 Handle<mirror::Class> c,
5815 bool can_init_fields,
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07005816 bool can_init_parents) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08005817 DCHECK(c != nullptr);
Igor Murashkin86083f72017-10-27 10:59:04 -07005818
Mathieu Chartier524507a2014-08-27 15:28:28 -07005819 if (c->IsInitialized()) {
Vladimir Marko8e110652019-07-30 10:14:41 +01005820 // If we've seen an initialized but not visibly initialized class
5821 // many times, request visible initialization.
5822 if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
5823 // Thanks to the x86 memory model, classes skip the initialized status.
5824 DCHECK(c->IsVisiblyInitialized());
5825 } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
5826 if (self->IncrementMakeVisiblyInitializedCounter()) {
5827 MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
5828 }
5829 }
Andreas Gampe5b20b352018-10-11 19:03:20 -07005830 DCHECK(c->WasVerificationAttempted()) << c->PrettyClassAndClassLoader();
Mathieu Chartier524507a2014-08-27 15:28:28 -07005831 return true;
5832 }
Igor Murashkin86083f72017-10-27 10:59:04 -07005833 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5834 //
5835 // Ensure the bitstring is initialized before any of the class initialization
5836 // logic occurs. Once a class initializer starts running, objects can
5837 // escape into the heap and use the subtype checking code.
5838 //
5839 // Note: A class whose SubtypeCheckInfo is at least Initialized means it
5840 // can be used as a source for the IsSubClass check, and that all ancestors
5841 // of the class are Assigned (can be used as a target for IsSubClass check)
5842 // or Overflowed (can be used as a source for IsSubClass check).
Vladimir Marko305c38b2018-02-14 11:50:07 +00005843 if (kBitstringSubtypeCheckEnabled) {
Igor Murashkin86083f72017-10-27 10:59:04 -07005844 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
Vladimir Marko38b8b252018-01-02 19:07:06 +00005845 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
Igor Murashkin86083f72017-10-27 10:59:04 -07005846 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
5847 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005848 const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
Mathieu Chartier524507a2014-08-27 15:28:28 -07005849 if (!success) {
5850 if (can_init_fields && can_init_parents) {
David Sehr709b0702016-10-13 09:12:37 -07005851 CHECK(self->IsExceptionPending()) << c->PrettyClass();
Vladimir Markoac576912021-03-31 11:16:22 +01005852 } else {
5853 // There may or may not be an exception pending. If there is, clear it.
5854 // We propagate the exception only if we can initialize fields and parents.
5855 self->ClearException();
Mathieu Chartier524507a2014-08-27 15:28:28 -07005856 }
5857 } else {
5858 self->AssertNoPendingException();
Ian Rogers595799e2012-01-11 17:32:51 -08005859 }
5860 return success;
Elliott Hughesf4c21c92011-08-19 17:31:31 -07005861}
5862
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005863void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
5864 ObjPtr<mirror::Class> new_class) {
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005865 DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005866 for (ArtField& field : new_class->GetIFields()) {
5867 if (field.GetDeclaringClass() == temp_class) {
5868 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005869 }
5870 }
5871
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005872 DCHECK_EQ(temp_class->NumStaticFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005873 for (ArtField& field : new_class->GetSFields()) {
5874 if (field.GetDeclaringClass() == temp_class) {
5875 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005876 }
5877 }
5878
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005879 DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005880 DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
Alex Lighte64300b2015-12-15 15:02:47 -08005881 for (auto& method : new_class->GetMethods(image_pointer_size_)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005882 if (method.GetDeclaringClass() == temp_class) {
5883 method.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005884 }
5885 }
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005886
5887 // Make sure the remembered set and mod-union tables know that we updated some of the native
5888 // roots.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07005889 WriteBarrier::ForEveryFieldWrite(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005890}
5891
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005892void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08005893 CHECK(class_loader->GetAllocator() == nullptr);
5894 CHECK(class_loader->GetClassTable() == nullptr);
5895 Thread* const self = Thread::Current();
5896 ClassLoaderData data;
Ian Rogers55256cb2017-12-21 17:07:11 -08005897 data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
Mathieu Chartier5b830502016-03-02 10:30:23 -08005898 // Create and set the class table.
5899 data.class_table = new ClassTable;
5900 class_loader->SetClassTable(data.class_table);
5901 // Create and set the linear allocator.
5902 data.allocator = Runtime::Current()->CreateLinearAlloc();
5903 class_loader->SetAllocator(data.allocator);
5904 // Add to the list so that we know to free the data later.
5905 class_loaders_.push_back(data);
5906}
5907
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005908ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier6b069532015-08-05 15:08:12 -07005909 if (class_loader == nullptr) {
Andreas Gampe2af99022017-04-25 08:32:59 -07005910 return boot_class_table_.get();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005911 }
Mathieu Chartier6b069532015-08-05 15:08:12 -07005912 ClassTable* class_table = class_loader->GetClassTable();
5913 if (class_table == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08005914 RegisterClassLoader(class_loader);
5915 class_table = class_loader->GetClassTable();
5916 DCHECK(class_table != nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07005917 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005918 return class_table;
5919}
5920
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005921ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2af99022017-04-25 08:32:59 -07005922 return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005923}
5924
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005925static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005926 REQUIRES_SHARED(Locks::mutator_lock_) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005927 while (klass->HasSuperClass()) {
5928 klass = klass->GetSuperClass();
5929 if (klass->ShouldHaveImt()) {
5930 return klass->GetImt(pointer_size);
5931 }
5932 }
5933 return nullptr;
5934}
5935
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07005936bool ClassLinker::LinkClass(Thread* self,
5937 const char* descriptor,
5938 Handle<mirror::Class> klass,
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07005939 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005940 MutableHandle<mirror::Class>* h_new_class_out) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00005941 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005942
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005943 if (!LinkSuperClass(klass)) {
5944 return false;
5945 }
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005946 ArtMethod* imt_data[ImTable::kSize];
5947 // Whether there are any new conflicts compared to the super class.
5948 bool new_conflict = false;
Nicolas Geoffray918dcea2017-07-21 07:58:14 +00005949 std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005950 if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005951 return false;
5952 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005953 if (!LinkInstanceFields(self, klass)) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005954 return false;
5955 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005956 size_t class_size;
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005957 if (!LinkStaticFields(self, klass, &class_size)) {
Brian Carlstrom4873d462011-08-21 15:23:39 -07005958 return false;
5959 }
5960 CreateReferenceInstanceOffsets(klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005961 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005962
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005963 ImTable* imt = nullptr;
5964 if (klass->ShouldHaveImt()) {
5965 // If there are any new conflicts compared to the super class, we cannot make a copy. There
5966 // can be cases where both will have a conflict method at the same slot without having the same
5967 // set of conflicts. In this case, we cannot share the IMT since the conflict table slow path
5968 // will possibly create a table that is incorrect for either of the classes.
5969 // Same IMT with new_conflict does not happen very often.
5970 if (!new_conflict) {
5971 ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
5972 if (super_imt != nullptr) {
5973 bool imt_equals = true;
5974 for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
5975 imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
5976 }
5977 if (imt_equals) {
5978 imt = super_imt;
5979 }
5980 }
5981 }
5982 if (imt == nullptr) {
5983 LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
5984 imt = reinterpret_cast<ImTable*>(
5985 allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_)));
5986 if (imt == nullptr) {
5987 return false;
5988 }
5989 imt->Populate(imt_data, image_pointer_size_);
5990 }
5991 }
5992
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005993 if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
5994 // We don't need to retire this class as it has no embedded tables or it was created with the
5995 // correct size during class linker initialization.
David Sehr709b0702016-10-13 09:12:37 -07005996 CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005997
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005998 if (klass->ShouldHaveEmbeddedVTable()) {
5999 klass->PopulateEmbeddedVTable(image_pointer_size_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006000 }
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006001 if (klass->ShouldHaveImt()) {
6002 klass->SetImt(imt, image_pointer_size_);
6003 }
Mingyao Yang063fc772016-08-02 11:02:54 -07006004
6005 // Update CHA info based on whether we override methods.
6006 // Have to do this before setting the class as resolved, which allows
6007 // instantiation of klass.
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00006008 if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07006009 cha_->UpdateAfterLoadingOf(klass);
6010 }
Nicolas Geoffray918dcea2017-07-21 07:58:14 +00006011
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006012 // This will notify waiters on klass that saw the not yet resolved
6013 // class in the class_table_ during EnsureResolved.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006014 mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006015 h_new_class_out->Assign(klass.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006016 } else {
6017 CHECK(!klass->IsResolved());
6018 // Retire the temporary class and create the correctly sized resolved class.
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006019 StackHandleScope<1> hs(self);
Vladimir Marko3068d582019-05-28 16:39:29 +01006020 Handle<mirror::Class> h_new_class =
6021 hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
Mathieu Chartier3ee25bb2015-08-10 10:13:02 -07006022 // Set arrays to null since we don't want to have multiple classes with the same ArtField or
6023 // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
6024 // may not see any references to the target space and clean the card for a class if another
6025 // class had the same array pointer.
Alex Lighte64300b2015-12-15 15:02:47 -08006026 klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07006027 klass->SetSFieldsPtrUnchecked(nullptr);
6028 klass->SetIFieldsPtrUnchecked(nullptr);
Andreas Gampefa4333d2017-02-14 11:10:34 -08006029 if (UNLIKELY(h_new_class == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006030 self->AssertPendingOOMException();
Vladimir Marko2c64a832018-01-04 11:31:56 +00006031 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006032 return false;
6033 }
6034
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006035 CHECK_EQ(h_new_class->GetClassSize(), class_size);
6036 ObjectLock<mirror::Class> lock(self, h_new_class);
6037 FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006038
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00006039 if (LIKELY(descriptor != nullptr)) {
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07006040 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Marko0984e482019-03-27 16:41:41 +00006041 const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006042 ClassTable* const table = InsertClassTableForClassLoader(class_loader);
Vladimir Marko0984e482019-03-27 16:41:41 +00006043 const ObjPtr<mirror::Class> existing =
6044 table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
Mathieu Chartier05aa4d32015-09-19 12:44:38 -07006045 if (class_loader != nullptr) {
6046 // We updated the class in the class table, perform the write barrier so that the GC knows
6047 // about the change.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07006048 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier05aa4d32015-09-19 12:44:38 -07006049 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006050 CHECK_EQ(existing, klass.Get());
Vladimir Marko1998cd02017-01-13 13:02:58 +00006051 if (log_new_roots_) {
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07006052 new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
6053 }
6054 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006055
Mingyao Yang063fc772016-08-02 11:02:54 -07006056 // Update CHA info based on whether we override methods.
6057 // Have to do this before setting the class as resolved, which allows
6058 // instantiation of klass.
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00006059 if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
Andreas Gampec1ac9ee2017-07-24 22:35:49 -07006060 cha_->UpdateAfterLoadingOf(h_new_class);
6061 }
Mingyao Yang063fc772016-08-02 11:02:54 -07006062
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006063 // This will notify waiters on temp class that saw the not yet resolved class in the
6064 // class_table_ during EnsureResolved.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006065 mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006066
Vladimir Marko2c64a832018-01-04 11:31:56 +00006067 CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006068 // This will notify waiters on new_class that saw the not yet resolved
6069 // class in the class_table_ during EnsureResolved.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006070 mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07006071 // Return the new class.
6072 h_new_class_out->Assign(h_new_class.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006073 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006074 return true;
6075}
6076
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07006077bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00006078 CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006079 const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
Andreas Gampea5b09a62016-11-17 15:21:22 -08006080 dex::TypeIndex super_class_idx = class_def.superclass_idx_;
6081 if (super_class_idx.IsValid()) {
Roland Levillain90328ac2016-05-18 12:25:38 +01006082 // Check that a class does not inherit from itself directly.
6083 //
6084 // TODO: This is a cheap check to detect the straightforward case
6085 // of a class extending itself (b/28685551), but we should do a
6086 // proper cycle detection on loaded classes, to detect all cases
6087 // of class circularity errors (b/28830038).
6088 if (super_class_idx == class_def.class_idx_) {
6089 ThrowClassCircularityError(klass.Get(),
6090 "Class %s extends itself",
David Sehr709b0702016-10-13 09:12:37 -07006091 klass->PrettyDescriptor().c_str());
Roland Levillain90328ac2016-05-18 12:25:38 +01006092 return false;
6093 }
6094
Vladimir Marko666ee3d2017-12-11 18:37:36 +00006095 ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006096 if (super_class == nullptr) {
Brian Carlstrom65ca0772011-09-24 16:03:08 -07006097 DCHECK(Thread::Current()->IsExceptionPending());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006098 return false;
6099 }
Ian Rogersbe125a92012-01-11 15:19:49 -08006100 // Verify
6101 if (!klass->CanAccess(super_class)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006102 ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
David Sehr709b0702016-10-13 09:12:37 -07006103 super_class->PrettyDescriptor().c_str(),
6104 klass->PrettyDescriptor().c_str());
Ian Rogersbe125a92012-01-11 15:19:49 -08006105 return false;
6106 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07006107 CHECK(super_class->IsResolved());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006108 klass->SetSuperClass(super_class);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006109 }
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006110 const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006111 if (interfaces != nullptr) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006112 for (size_t i = 0; i < interfaces->Size(); i++) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08006113 dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
Vladimir Marko666ee3d2017-12-11 18:37:36 +00006114 ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006115 if (interface == nullptr) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006116 DCHECK(Thread::Current()->IsExceptionPending());
6117 return false;
6118 }
6119 // Verify
6120 if (!klass->CanAccess(interface)) {
6121 // TODO: the RI seemed to ignore this in my testing.
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006122 ThrowIllegalAccessError(klass.Get(),
6123 "Interface %s implemented by class %s is inaccessible",
David Sehr709b0702016-10-13 09:12:37 -07006124 interface->PrettyDescriptor().c_str(),
6125 klass->PrettyDescriptor().c_str());
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08006126 return false;
6127 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006128 }
6129 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07006130 // Mark the class as loaded.
Vladimir Marko2c64a832018-01-04 11:31:56 +00006131 mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006132 return true;
6133}
6134
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07006135bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006136 CHECK(!klass->IsPrimitive());
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006137 ObjPtr<mirror::Class> super = klass->GetSuperClass();
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006138 ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
6139 if (klass.Get() == object_class) {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006140 if (super != nullptr) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006141 ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006142 return false;
6143 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006144 return true;
6145 }
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006146 if (super == nullptr) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006147 ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
David Sehr709b0702016-10-13 09:12:37 -07006148 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006149 return false;
6150 }
6151 // Verify
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006152 if (klass->IsInterface() && super != object_class) {
Vladimir Marko1fcae9f2017-11-28 14:14:19 +00006153 ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
6154 return false;
6155 }
Vladimir Markob43b2d82017-07-18 17:46:38 +01006156 if (super->IsFinal()) {
6157 ThrowVerifyError(klass.Get(),
6158 "Superclass %s of %s is declared final",
6159 super->PrettyDescriptor().c_str(),
6160 klass->PrettyDescriptor().c_str());
6161 return false;
6162 }
6163 if (super->IsInterface()) {
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006164 ThrowIncompatibleClassChangeError(klass.Get(),
Vladimir Markob43b2d82017-07-18 17:46:38 +01006165 "Superclass %s of %s is an interface",
David Sehr709b0702016-10-13 09:12:37 -07006166 super->PrettyDescriptor().c_str(),
Vladimir Markob43b2d82017-07-18 17:46:38 +01006167 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006168 return false;
6169 }
6170 if (!klass->CanAccess(super)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006171 ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
David Sehr709b0702016-10-13 09:12:37 -07006172 super->PrettyDescriptor().c_str(),
6173 klass->PrettyDescriptor().c_str());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006174 return false;
6175 }
Elliott Hughes20cde902011-10-04 17:37:27 -07006176
Brian Carlstromf3632832014-05-20 15:36:53 -07006177 // Inherit kAccClassIsFinalizable from the superclass in case this
6178 // class doesn't override finalize.
Elliott Hughes20cde902011-10-04 17:37:27 -07006179 if (super->IsFinalizable()) {
6180 klass->SetFinalizable();
6181 }
6182
Mathieu Chartiere4275c02015-08-06 15:34:15 -07006183 // Inherit the class loader flag from the super class.
6184 if (super->IsClassLoaderClass()) {
6185 klass->SetClassLoaderClass();
6186 }
6187
Elliott Hughes2da50362011-10-10 16:57:08 -07006188 // Inherit reference flags (if any) from the superclass.
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07006189 uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
Elliott Hughes2da50362011-10-10 16:57:08 -07006190 if (reference_flags != 0) {
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -07006191 CHECK_EQ(klass->GetClassFlags(), 0u);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -07006192 klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
Elliott Hughes2da50362011-10-10 16:57:08 -07006193 }
Elliott Hughes72ee0ae2011-10-10 17:31:28 -07006194 // Disallow custom direct subclasses of java.lang.ref.Reference.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006195 if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006196 ThrowLinkageError(klass.Get(),
Ian Rogers62d6c772013-02-27 08:32:07 -08006197 "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
David Sehr709b0702016-10-13 09:12:37 -07006198 klass->PrettyDescriptor().c_str());
Elliott Hughes72ee0ae2011-10-10 17:31:28 -07006199 return false;
6200 }
Elliott Hughes2da50362011-10-10 16:57:08 -07006201
Ian Rogers7dfb28c2013-08-22 08:18:36 -07006202 if (kIsDebugBuild) {
6203 // Ensure super classes are fully resolved prior to resolving fields..
Andreas Gampe2ed8def2014-08-28 14:41:02 -07006204 while (super != nullptr) {
Ian Rogers7dfb28c2013-08-22 08:18:36 -07006205 CHECK(super->IsResolved());
6206 super = super->GetSuperClass();
6207 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006208 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006209 return true;
6210}
6211
Vladimir Marko782fb712020-12-23 12:47:31 +00006212// A wrapper class representing the result of a method translation used for linking methods and
6213 // updating superclass default methods. For each method in a class's vtable there are 4 states it
6214// could be in:
6215// 1) No translation is necessary. In this case there is no MethodTranslation object for it. This
6216// is the standard case and is true when the method is not overridable by a default method,
6217// the class defines a concrete implementation of the method, the default method implementation
6218// remains the same, or an abstract method stayed abstract.
6219// 2) The method must be translated to a different default method. We note this with
6220// CreateTranslatedMethod.
6221// 3) The method must be replaced with a conflict method. This happens when a superclass
6222// implements an interface with a default method and this class implements an unrelated
6223// interface that also defines that default method. We note this with CreateConflictingMethod.
6224// 4) The method must be replaced with an abstract miranda method. This happens when a superclass
6225// implements an interface with a default method and this class implements a subinterface of
6226// the superclass's interface which declares the default method abstract. We note this with
6227// CreateAbstractMethod.
6228//
6229// When a method translation is unnecessary (case #1), we don't put it into the
6230 // default_translations map. So an instance of MethodTranslation must be in one of #2-#4.
6231class ClassLinker::MethodTranslation {
6232 public:
6233 MethodTranslation() : translation_(nullptr), type_(Type::kInvalid) {}
6234
6235 // This slot must become a default conflict method.
6236 static MethodTranslation CreateConflictingMethod() {
6237 return MethodTranslation(Type::kConflict, /*translation=*/nullptr);
6238 }
6239
6240 // This slot must become an abstract method.
6241 static MethodTranslation CreateAbstractMethod() {
6242 return MethodTranslation(Type::kAbstract, /*translation=*/nullptr);
6243 }
6244
6245 // Use the given method as the current value for this vtable slot during translation.
6246 static MethodTranslation CreateTranslatedMethod(ArtMethod* new_method) {
6247 return MethodTranslation(Type::kTranslation, new_method);
6248 }
6249
6250 // Returns true if this is a method that must become a conflict method.
6251 bool IsInConflict() const {
6252 return type_ == Type::kConflict;
6253 }
6254
6255 // Returns true if this is a method that must become an abstract method.
6256 bool IsAbstract() const {
6257 return type_ == Type::kAbstract;
6258 }
6259
6260 // Returns true if this is a method that must become a different method.
6261 bool IsTranslation() const {
6262 return type_ == Type::kTranslation;
6263 }
6264
6265 // Get the translated version of this method.
6266 ArtMethod* GetTranslation() const {
6267 DCHECK(IsTranslation());
6268 DCHECK(translation_ != nullptr);
6269 return translation_;
6270 }
6271
6272 private:
6273 enum class Type {
6274 kInvalid,
6275 kTranslation,
6276 kConflict,
6277 kAbstract,
6278 };
6279
6280 MethodTranslation(Type type, ArtMethod* translation)
6281 : translation_(translation), type_(type) {}
6282
6283 ArtMethod* translation_;
6284 Type type_;
6285};
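
// A minimal usage sketch (an assumption for illustration, not code used below): given a
// MethodTranslation `t` looked up in the default_translations map for a vtable slot, a consumer
// would dispatch on its state before rewriting that slot. `conflict_stub` and `abstract_stub`
// are hypothetical placeholders for the methods used in cases #3 and #4.
//
//   ArtMethod* ResolveTranslation(const ClassLinker::MethodTranslation& t,
//                                 ArtMethod* conflict_stub,
//                                 ArtMethod* abstract_stub) {
//     if (t.IsInConflict()) {
//       return conflict_stub;      // Case #3: replace with a default conflict method.
//     }
//     if (t.IsAbstract()) {
//       return abstract_stub;      // Case #4: replace with an abstract (miranda) method.
//     }
//     return t.GetTranslation();   // Case #2: redirect to the new default method.
//   }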
6286
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006287// Populate the class vtable and itable. Compute return type indices.
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006288bool ClassLinker::LinkMethods(Thread* self,
6289 Handle<mirror::Class> klass,
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006290 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006291 bool* out_new_conflict,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07006292 ArtMethod** out_imt) {
Ian Rogers7b078e82014-09-10 14:44:24 -07006293 self->AllowThreadSuspension();
Alex Lighteb7c1442015-08-31 13:17:42 -07006294 // A map from vtable indexes to the method they need to be updated to point to. Used because we
6295 // need to have default methods be in the virtuals array of each class but we don't set that up
6296 // until LinkInterfaceMethods.
Vladimir Marko782fb712020-12-23 12:47:31 +00006297 constexpr size_t kBufferSize = 8; // Avoid malloc/free for a few translations.
6298 std::pair<size_t, ClassLinker::MethodTranslation> buffer[kBufferSize];
6299 HashMap<size_t, ClassLinker::MethodTranslation> default_translations(buffer, kBufferSize);
Alex Lighteb7c1442015-08-31 13:17:42 -07006300 // Link virtual methods then interface methods.
6301 // We set up the interface lookup table first because we need it to determine if we need to update
6302 // any vtable entries with new default method implementations.
6303 return SetupInterfaceLookupTable(self, klass, interfaces)
6304 && LinkVirtualMethods(self, klass, /*out*/ &default_translations)
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006305 && LinkInterfaceMethods(self, klass, default_translations, out_new_conflict, out_imt);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006306}
6307
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006308// Comparator for name and signature of a method, used in finding overriding methods. The
6309 // implementation avoids the use of handles; if it didn't, then rather than comparing dex files we
6310 // could compare dex caches in the implementation below.
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01006311class MethodNameAndSignatureComparator final : public ValueObject {
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006312 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -07006313 explicit MethodNameAndSignatureComparator(ArtMethod* method)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006314 REQUIRES_SHARED(Locks::mutator_lock_) :
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006315 dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006316 name_view_() {
David Sehr709b0702016-10-13 09:12:37 -07006317 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006318 }
6319
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006320 ALWAYS_INLINE std::string_view GetNameView() {
6321 if (name_view_.empty()) {
6322 name_view_ = dex_file_->StringViewByIdx(mid_->name_idx_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006323 }
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006324 return name_view_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006325 }
6326
Mathieu Chartiere401d142015-04-22 13:56:20 -07006327 bool HasSameNameAndSignature(ArtMethod* other)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006328 REQUIRES_SHARED(Locks::mutator_lock_) {
David Sehr709b0702016-10-13 09:12:37 -07006329 DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006330 const DexFile* other_dex_file = other->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006331 const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006332 if (dex_file_ == other_dex_file) {
6333 return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6334 }
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006335 return GetNameView() == other_dex_file->StringViewByIdx(other_mid.name_idx_) &&
6336 dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006337 }
6338
6339 private:
6340 // Dex file for the method to compare against.
6341 const DexFile* const dex_file_;
6342 // MethodId for the method to compare against.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006343 const dex::MethodId* const mid_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006344 // Lazily computed name from the dex file's strings.
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006345 std::string_view name_view_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006346};
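
// A minimal sketch of the intended use, assuming two resolved ArtMethod pointers `super_method`
// and `candidate` and a PointerSize `pointer_size` (hypothetical locals):
//
//   MethodNameAndSignatureComparator cmp(
//       super_method->GetInterfaceMethodIfProxy(pointer_size));
//   if (cmp.HasSameNameAndSignature(candidate->GetInterfaceMethodIfProxy(pointer_size))) {
//     // `candidate` has the same name and signature as `super_method` and therefore overrides
//     // it, subject to the access checks performed by the caller.
//   }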
6347
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006348class LinkVirtualHashTable {
6349 public:
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006350 LinkVirtualHashTable(Handle<mirror::Class> klass,
6351 size_t hash_size,
6352 uint32_t* hash_table,
Andreas Gampe542451c2016-07-26 09:02:02 -07006353 PointerSize image_pointer_size)
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006354 : klass_(klass),
6355 hash_size_(hash_size),
6356 hash_table_(hash_table),
Mathieu Chartiere401d142015-04-22 13:56:20 -07006357 image_pointer_size_(image_pointer_size) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006358 std::fill(hash_table_, hash_table_ + hash_size_, invalid_index_);
6359 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006360
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006361 void Add(uint32_t virtual_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006362 ArtMethod* local_method = klass_->GetVirtualMethodDuringLinking(
6363 virtual_method_index, image_pointer_size_);
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006364 std::string_view name_view =
6365 local_method->GetInterfaceMethodIfProxy(image_pointer_size_)->GetNameView();
6366 uint32_t hash = ComputeModifiedUtf8Hash(name_view);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006367 uint32_t index = hash % hash_size_;
6368 // Linear probe until we have an empty slot.
6369 while (hash_table_[index] != invalid_index_) {
6370 if (++index == hash_size_) {
6371 index = 0;
6372 }
6373 }
6374 hash_table_[index] = virtual_method_index;
6375 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006376
Vladimir Marko43354742021-02-03 15:37:01 +00006377 uint32_t FindAndRemove(MethodNameAndSignatureComparator* comparator, uint32_t hash)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006378 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006379 DCHECK_EQ(hash, ComputeModifiedUtf8Hash(comparator->GetNameView()));
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006380 size_t index = hash % hash_size_;
6381 while (true) {
6382 const uint32_t value = hash_table_[index];
6383       // Since linear probing makes contiguous blocks, hitting an invalid index means we are done
6384       // with the block and can safely assume the method was not found.
6385 if (value == invalid_index_) {
6386 break;
6387 }
6388       if (value != removed_index_) {  // This signifies not already overridden.
Mathieu Chartiere401d142015-04-22 13:56:20 -07006389 ArtMethod* virtual_method =
6390 klass_->GetVirtualMethodDuringLinking(value, image_pointer_size_);
6391 if (comparator->HasSameNameAndSignature(
6392 virtual_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006393 hash_table_[index] = removed_index_;
6394 return value;
6395 }
6396 }
6397 if (++index == hash_size_) {
6398 index = 0;
6399 }
6400 }
6401 return GetNotFoundIndex();
6402 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07006403
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006404 static uint32_t GetNotFoundIndex() {
6405 return invalid_index_;
6406 }
6407
6408 private:
6409 static const uint32_t invalid_index_;
6410 static const uint32_t removed_index_;
6411
6412 Handle<mirror::Class> klass_;
6413 const size_t hash_size_;
6414 uint32_t* const hash_table_;
Andreas Gampe542451c2016-07-26 09:02:02 -07006415 const PointerSize image_pointer_size_;
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006416};
6417
6418const uint32_t LinkVirtualHashTable::invalid_index_ = std::numeric_limits<uint32_t>::max();
6419const uint32_t LinkVirtualHashTable::removed_index_ = std::numeric_limits<uint32_t>::max() - 1;
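
// A minimal sketch of the probe/remove protocol implemented above, assuming the surrounding
// locals (`klass`, `hash_table_size`, `hash_table_ptr`, a comparator and its name hash) exist
// as in LinkVirtualMethods below:
//
//   LinkVirtualHashTable table(klass, hash_table_size, hash_table_ptr, pointer_size);
//   for (size_t i = 0; i < num_virtual_methods; ++i) {
//     table.Add(i);  // Linear-probe the method name hash to a free slot.
//   }
//   uint32_t overriding = table.FindAndRemove(&super_method_comparator, name_hash);
//   if (overriding != LinkVirtualHashTable::GetNotFoundIndex()) {
//     // Virtual method `overriding` of klass overrides the probed super method; its slot is
//     // now marked removed_index_ so later probes skip it.
//   }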
6420
Stephen Hines1ddd9132017-02-08 01:51:18 -08006421bool ClassLinker::LinkVirtualMethods(
Alex Lighteb7c1442015-08-31 13:17:42 -07006422 Thread* self,
6423 Handle<mirror::Class> klass,
Vladimir Marko782fb712020-12-23 12:47:31 +00006424 /*out*/HashMap<size_t, ClassLinker::MethodTranslation>* default_translations) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006425 const size_t num_virtual_methods = klass->NumVirtualMethods();
Alex Lighteb7c1442015-08-31 13:17:42 -07006426 if (klass->IsInterface()) {
6427 // No vtable.
6428 if (!IsUint<16>(num_virtual_methods)) {
6429 ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
6430 return false;
6431 }
6432 bool has_defaults = false;
Alex Lighteb7c1442015-08-31 13:17:42 -07006433 // Assign each method an IMT index and set the default flag.
6434 for (size_t i = 0; i < num_virtual_methods; ++i) {
6435 ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6436 m->SetMethodIndex(i);
6437 if (!m->IsAbstract()) {
Vladimir Marko1c993cd2020-05-28 09:30:06 +00006438 // If the dex file does not support default methods, throw ClassFormatError.
6439 // This check is necessary to protect from odd cases, such as native default
6440 // methods, that the dex file verifier permits for old dex file versions. b/157170505
6441 // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
6442 // currently running CTS tests for default methods with dex file version 035 which
6443 // does not support default methods. So, we limit this to native methods. b/157718952
6444 if (m->IsNative()) {
6445 DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
6446 ThrowClassFormatError(klass.Get(),
6447 "Dex file does not support default method '%s'",
6448 m->PrettyMethod().c_str());
6449 return false;
6450 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006451 m->SetAccessFlags(m->GetAccessFlags() | kAccDefault);
6452 has_defaults = true;
6453 }
6454 }
6455 // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
6456 // during initialization. This is a performance optimization. We could simply traverse the
6457 // virtual_methods_ array again during initialization.
6458 if (has_defaults) {
6459 klass->SetHasDefaultMethods();
6460 }
6461 return true;
6462 } else if (klass->HasSuperClass()) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006463 const size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
6464 const size_t max_count = num_virtual_methods + super_vtable_length;
Vladimir Marko3068d582019-05-28 16:39:29 +01006465 StackHandleScope<3> hs(self);
Mingyao Yang38eecb02014-08-13 14:51:03 -07006466 Handle<mirror::Class> super_class(hs.NewHandle(klass->GetSuperClass()));
Mathieu Chartiere401d142015-04-22 13:56:20 -07006467 MutableHandle<mirror::PointerArray> vtable;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006468 if (super_class->ShouldHaveEmbeddedVTable()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006469 vtable = hs.NewHandle(AllocPointerArray(self, max_count));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006470 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006471 self->AssertPendingOOMException();
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006472 return false;
6473 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006474 for (size_t i = 0; i < super_vtable_length; i++) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006475 vtable->SetElementPtrSize(
6476 i, super_class->GetEmbeddedVTableEntry(i, image_pointer_size_), image_pointer_size_);
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006477 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006478 // We might need to change vtable if we have new virtual methods or new interfaces (since that
6479 // might give us new default methods). If no new interfaces then we can skip the rest since
6480 // the class cannot override any of the super-class's methods. This is required for
6481 // correctness since without it we might not update overridden default method vtable entries
6482 // correctly.
6483 if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006484 klass->SetVTable(vtable.Get());
6485 return true;
6486 }
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006487 } else {
Alex Lighteb7c1442015-08-31 13:17:42 -07006488 DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
Vladimir Marko3068d582019-05-28 16:39:29 +01006489 Handle<mirror::PointerArray> super_vtable = hs.NewHandle(super_class->GetVTable());
David Sehr709b0702016-10-13 09:12:37 -07006490 CHECK(super_vtable != nullptr) << super_class->PrettyClass();
Alex Lighteb7c1442015-08-31 13:17:42 -07006491 // We might need to change vtable if we have new virtual methods or new interfaces (since that
6492 // might give us new default methods). See comment above.
6493 if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
Vladimir Marko3068d582019-05-28 16:39:29 +01006494 klass->SetVTable(super_vtable.Get());
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006495 return true;
6496 }
Vladimir Marko3068d582019-05-28 16:39:29 +01006497 vtable = hs.NewHandle(ObjPtr<mirror::PointerArray>::DownCast(
6498 mirror::Array::CopyOf(super_vtable, self, max_count)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006499 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006500 self->AssertPendingOOMException();
Mingyao Yang2cdbad72014-07-16 10:44:41 -07006501 return false;
6502 }
Ian Rogersa436fde2013-08-27 23:34:06 -07006503 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006504 // How the algorithm works:
6505 // 1. Populate hash table by adding num_virtual_methods from klass. The values in the hash
6506 // table are: invalid_index for unused slots, index super_vtable_length + i for a virtual
6507 // method which has not been matched to a vtable method, and j if the virtual method at the
6508 // index overrode the super virtual method at index j.
6509     // 2. Loop through super virtual methods; if they are overridden, update hash table to j
6510 // (j < super_vtable_length) to avoid redundant checks. (TODO maybe use this info for reducing
6511 // the need for the initial vtable which we later shrink back down).
6512     // 3. Add non-overridden methods to the end of the vtable.
6513 static constexpr size_t kMaxStackHash = 250;
Alex Lighteb7c1442015-08-31 13:17:42 -07006514 // + 1 so that even if we only have new default methods we will still be able to use this hash
6515 // table (i.e. it will never have 0 size).
6516 const size_t hash_table_size = num_virtual_methods * 3 + 1;
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006517 uint32_t* hash_table_ptr;
6518 std::unique_ptr<uint32_t[]> hash_heap_storage;
6519 if (hash_table_size <= kMaxStackHash) {
6520 hash_table_ptr = reinterpret_cast<uint32_t*>(
6521 alloca(hash_table_size * sizeof(*hash_table_ptr)));
6522 } else {
6523 hash_heap_storage.reset(new uint32_t[hash_table_size]);
6524 hash_table_ptr = hash_heap_storage.get();
6525 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07006526 LinkVirtualHashTable hash_table(klass, hash_table_size, hash_table_ptr, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006527 // Add virtual methods to the hash table.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006528 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006529 DCHECK(klass->GetVirtualMethodDuringLinking(
6530 i, image_pointer_size_)->GetDeclaringClass() != nullptr);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006531 hash_table.Add(i);
6532 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006533 // Loop through each super vtable method and see if they are overridden by a method we added to
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006534 // the hash table.
6535 for (size_t j = 0; j < super_vtable_length; ++j) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006536 // Search the hash table to see if we are overridden by any method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07006537 ArtMethod* super_method = vtable->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
Alex Lightc7a420c2016-10-18 14:33:18 -07006538 if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
6539 super_method->GetAccessFlags())) {
6540         // Continue on to the next method since this one is package private and cannot be overridden.
6541 // Before Android 4.1, the package-private method super_method might have been incorrectly
6542 // overridden.
6543 continue;
6544 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006545 MethodNameAndSignatureComparator super_method_name_comparator(
Mathieu Chartiere401d142015-04-22 13:56:20 -07006546 super_method->GetInterfaceMethodIfProxy(image_pointer_size_));
Alex Lightc7a420c2016-10-18 14:33:18 -07006547 // We remove the method so that subsequent lookups will be faster by making the hash-map
6548 // smaller as we go on.
Vladimir Marko43354742021-02-03 15:37:01 +00006549 uint32_t hash = (j < mirror::Object::kVTableLength)
6550 ? object_virtual_method_hashes_[j]
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006551 : ComputeModifiedUtf8Hash(super_method_name_comparator.GetNameView());
Vladimir Marko43354742021-02-03 15:37:01 +00006552 uint32_t hash_index = hash_table.FindAndRemove(&super_method_name_comparator, hash);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006553 if (hash_index != hash_table.GetNotFoundIndex()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006554 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(
6555 hash_index, image_pointer_size_);
Alex Lightc7a420c2016-10-18 14:33:18 -07006556 if (super_method->IsFinal()) {
6557 ThrowLinkageError(klass.Get(), "Method %s overrides final method in class %s",
6558 virtual_method->PrettyMethod().c_str(),
6559 super_method->GetDeclaringClassDescriptor());
6560 return false;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006561 }
Alex Lightc7a420c2016-10-18 14:33:18 -07006562 vtable->SetElementPtrSize(j, virtual_method, image_pointer_size_);
6563 virtual_method->SetMethodIndex(j);
Alex Light9139e002015-10-09 15:59:48 -07006564 } else if (super_method->IsOverridableByDefaultMethod()) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006565 // We didn't directly override this method but we might through default methods...
6566 // Check for default method update.
6567 ArtMethod* default_method = nullptr;
Alex Light9139e002015-10-09 15:59:48 -07006568 switch (FindDefaultMethodImplementation(self,
6569 super_method,
6570 klass,
6571 /*out*/&default_method)) {
6572 case DefaultMethodSearchResult::kDefaultConflict: {
6573 // A conflict was found looking for default methods. Note this (assuming it wasn't
6574 // pre-existing) in the translations map.
6575 if (UNLIKELY(!super_method->IsDefaultConflicting())) {
6576 // Don't generate another conflict method to reduce memory use as an optimization.
6577 default_translations->insert(
6578 {j, ClassLinker::MethodTranslation::CreateConflictingMethod()});
6579 }
6580 break;
6581 }
6582 case DefaultMethodSearchResult::kAbstractFound: {
6583 // No conflict but method is abstract.
6584 // We note that this vtable entry must be made abstract.
6585 if (UNLIKELY(!super_method->IsAbstract())) {
6586 default_translations->insert(
6587 {j, ClassLinker::MethodTranslation::CreateAbstractMethod()});
6588 }
6589 break;
6590 }
6591 case DefaultMethodSearchResult::kDefaultFound: {
6592 if (UNLIKELY(super_method->IsDefaultConflicting() ||
6593 default_method->GetDeclaringClass() != super_method->GetDeclaringClass())) {
6594 // Found a default method implementation that is new.
6595 // TODO Refactor this add default methods to virtuals here and not in
6596 // LinkInterfaceMethods maybe.
6597 // The problem is default methods might override previously present
6598 // default-method or miranda-method vtable entries from the superclass.
6599 // Unfortunately we need these to be entries in this class's virtuals. We do not
6600 // give these entries there until LinkInterfaceMethods so we pass this map around
6601 // to let it know which vtable entries need to be updated.
6602 // Make a note that vtable entry j must be updated, store what it needs to be updated
6603 // to. We will allocate a virtual method slot in LinkInterfaceMethods and fix it up
6604 // then.
6605 default_translations->insert(
6606 {j, ClassLinker::MethodTranslation::CreateTranslatedMethod(default_method)});
David Sehr709b0702016-10-13 09:12:37 -07006607 VLOG(class_linker) << "Method " << super_method->PrettyMethod()
6608 << " overridden by default "
6609 << default_method->PrettyMethod()
6610 << " in " << mirror::Class::PrettyClass(klass.Get());
Alex Light9139e002015-10-09 15:59:48 -07006611 }
6612 break;
6613 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006614 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006615 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006616 }
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006617 size_t actual_count = super_vtable_length;
Alex Lighteb7c1442015-08-31 13:17:42 -07006618 // Add the non-overridden methods at the end.
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006619 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006620 ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006621 size_t method_idx = local_method->GetMethodIndexDuringLinking();
6622 if (method_idx < super_vtable_length &&
Mathieu Chartiere401d142015-04-22 13:56:20 -07006623 local_method == vtable->GetElementPtrSize<ArtMethod*>(method_idx, image_pointer_size_)) {
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006624 continue;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006625 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07006626 vtable->SetElementPtrSize(actual_count, local_method, image_pointer_size_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006627 local_method->SetMethodIndex(actual_count);
6628 ++actual_count;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006629 }
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08006630 if (!IsUint<16>(actual_count)) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006631 ThrowClassFormatError(klass.Get(), "Too many methods defined on class: %zd", actual_count);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006632 return false;
6633 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006634 // Shrink vtable if possible
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006635 CHECK_LE(actual_count, max_count);
6636 if (actual_count < max_count) {
Vladimir Marko3068d582019-05-28 16:39:29 +01006637 vtable.Assign(ObjPtr<mirror::PointerArray>::DownCast(
6638 mirror::Array::CopyOf(vtable, self, actual_count)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006639 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006640 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07006641 return false;
6642 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006643 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07006644 klass->SetVTable(vtable.Get());
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006645 } else {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01006646 CHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(this));
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08006647 if (!IsUint<16>(num_virtual_methods)) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006648 ThrowClassFormatError(klass.Get(), "Too many methods: %d",
6649 static_cast<int>(num_virtual_methods));
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006650 return false;
6651 }
Vladimir Markobcf17522018-06-01 13:14:32 +01006652 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, num_virtual_methods);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006653 if (UNLIKELY(vtable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006654 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07006655 return false;
6656 }
Brian Carlstroma40f9bc2011-07-26 21:26:07 -07006657 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07006658 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6659 vtable->SetElementPtrSize(i, virtual_method, image_pointer_size_);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07006660 virtual_method->SetMethodIndex(i & 0xFFFF);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006661 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07006662 klass->SetVTable(vtable);
Vladimir Marko43354742021-02-03 15:37:01 +00006663 InitializeObjectVirtualMethodHashes(klass.Get(),
6664 image_pointer_size_,
6665 ArrayRef<uint32_t>(object_virtual_method_hashes_));
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07006666 }
6667 return true;
6668}
6669
Alex Light9139e002015-10-09 15:59:48 -07006670// Determine if the given iface has any subinterface in the given list that declares the method
6671// specified by 'target'.
6672//
6673// Arguments
6674// - self: The thread we are running on
6675// - target: A comparator that will match any method that overrides the method we are checking for
6676// - iftable: The iftable we are searching for an overriding method on.
6677// - ifstart: The index of the interface we are checking to see if anything overrides
6678// - iface: The interface we are checking to see if anything overrides.
6679// - image_pointer_size:
6680// The image pointer size.
6681//
6682// Returns
6683// - True: There is some method that matches the target comparator defined in an interface that
6684// is a subtype of iface.
6685// - False: There is no method that matches the target comparator in any interface that is a subtype
6686// of iface.
6687static bool ContainsOverridingMethodOf(Thread* self,
6688 MethodNameAndSignatureComparator& target,
6689 Handle<mirror::IfTable> iftable,
6690 size_t ifstart,
6691 Handle<mirror::Class> iface,
Andreas Gampe542451c2016-07-26 09:02:02 -07006692 PointerSize image_pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006693 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07006694 DCHECK(self != nullptr);
Andreas Gampefa4333d2017-02-14 11:10:34 -08006695 DCHECK(iface != nullptr);
6696 DCHECK(iftable != nullptr);
Alex Light9139e002015-10-09 15:59:48 -07006697 DCHECK_GE(ifstart, 0u);
6698 DCHECK_LT(ifstart, iftable->Count());
6699 DCHECK_EQ(iface.Get(), iftable->GetInterface(ifstart));
6700 DCHECK(iface->IsInterface());
6701
6702 size_t iftable_count = iftable->Count();
6703 StackHandleScope<1> hs(self);
6704 MutableHandle<mirror::Class> current_iface(hs.NewHandle<mirror::Class>(nullptr));
6705 for (size_t k = ifstart + 1; k < iftable_count; k++) {
6706 // Skip ifstart since our current interface obviously cannot override itself.
6707 current_iface.Assign(iftable->GetInterface(k));
Alex Lighte64300b2015-12-15 15:02:47 -08006708 // Iterate through every method on this interface. The order does not matter.
6709 for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(image_pointer_size)) {
Alex Light9139e002015-10-09 15:59:48 -07006710 if (UNLIKELY(target.HasSameNameAndSignature(
Alex Lighte64300b2015-12-15 15:02:47 -08006711 current_method.GetInterfaceMethodIfProxy(image_pointer_size)))) {
Alex Light9139e002015-10-09 15:59:48 -07006712 // Check if the i'th interface is a subtype of this one.
6713 if (iface->IsAssignableFrom(current_iface.Get())) {
6714 return true;
6715 }
6716 break;
6717 }
6718 }
6719 }
6720 return false;
6721}
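
// A worked example of the check above (illustrative): suppose iftable holds interface I at
// index ifstart with a default method matching `target`, and a later entry J, where J is a
// subinterface of I that re-declares the same method. The inner loop finds J's declaration via
// the name/signature comparator, I->IsAssignableFrom(J) holds, and the function returns true:
// I's version of the method is overridden further down the interface hierarchy.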
6722
Alex Lighteb7c1442015-08-31 13:17:42 -07006723// Find the default method implementation for 'interface_method' in 'klass'. Stores it into
Alex Light9139e002015-10-09 15:59:48 -07006724// out_default_method and returns kDefaultFound on success. If no default method was found return
6725// kAbstractFound and store nullptr into out_default_method. If an error occurs (such as a
6726// default_method conflict) it will return kDefaultConflict.
6727ClassLinker::DefaultMethodSearchResult ClassLinker::FindDefaultMethodImplementation(
6728 Thread* self,
6729 ArtMethod* target_method,
6730 Handle<mirror::Class> klass,
6731 /*out*/ArtMethod** out_default_method) const {
Alex Lighteb7c1442015-08-31 13:17:42 -07006732 DCHECK(self != nullptr);
6733 DCHECK(target_method != nullptr);
6734 DCHECK(out_default_method != nullptr);
Alex Lighteb7c1442015-08-31 13:17:42 -07006735
6736 *out_default_method = nullptr;
Alex Lighteb7c1442015-08-31 13:17:42 -07006737
6738 // We organize the interface table so that, for interface I any subinterfaces J follow it in the
6739 // table. This lets us walk the table backwards when searching for default methods. The first one
6740 // we encounter is the best candidate since it is the most specific. Once we have found it we keep
6741 // track of it and then continue checking all other interfaces, since we need to throw an error if
6742 // we encounter conflicting default method implementations (one is not a subtype of the other).
6743 //
6744 // The order of unrelated interfaces does not matter and is not defined.
6745 size_t iftable_count = klass->GetIfTableCount();
6746 if (iftable_count == 0) {
Alex Light9139e002015-10-09 15:59:48 -07006747 // No interfaces. We have already reset out to null so just return kAbstractFound.
6748 return DefaultMethodSearchResult::kAbstractFound;
Alex Lighteb7c1442015-08-31 13:17:42 -07006749 }
6750
Alex Light9139e002015-10-09 15:59:48 -07006751 StackHandleScope<3> hs(self);
6752 MutableHandle<mirror::Class> chosen_iface(hs.NewHandle<mirror::Class>(nullptr));
Alex Lighteb7c1442015-08-31 13:17:42 -07006753 MutableHandle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));
Alex Light9139e002015-10-09 15:59:48 -07006754 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
Alex Lighteb7c1442015-08-31 13:17:42 -07006755 MethodNameAndSignatureComparator target_name_comparator(
6756 target_method->GetInterfaceMethodIfProxy(image_pointer_size_));
6757 // Iterates over the klass's iftable in reverse
Alex Light9139e002015-10-09 15:59:48 -07006758 for (size_t k = iftable_count; k != 0; ) {
6759 --k;
6760
Alex Lighteb7c1442015-08-31 13:17:42 -07006761 DCHECK_LT(k, iftable->Count());
Alex Light9139e002015-10-09 15:59:48 -07006762
6763 iface.Assign(iftable->GetInterface(k));
Alex Lighte64300b2015-12-15 15:02:47 -08006764 // Iterate through every declared method on this interface. The order does not matter.
6765 for (auto& method_iter : iface->GetDeclaredVirtualMethods(image_pointer_size_)) {
6766 ArtMethod* current_method = &method_iter;
Alex Lighteb7c1442015-08-31 13:17:42 -07006767 // Skip abstract methods and methods with different names.
6768 if (current_method->IsAbstract() ||
6769 !target_name_comparator.HasSameNameAndSignature(
6770 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6771 continue;
Alex Lightd7c10c22016-03-31 10:03:07 -07006772 } else if (!current_method->IsPublic()) {
6773 // The verifier should have caught the non-public method for dex version 37. Just warn and
6774 // skip it since this is from before default-methods so we don't really need to care that it
6775 // has code.
David Sehr709b0702016-10-13 09:12:37 -07006776 LOG(WARNING) << "Interface method " << current_method->PrettyMethod()
6777 << " is not public! "
Alex Lightd7c10c22016-03-31 10:03:07 -07006778 << "This will be a fatal error in subsequent versions of android. "
6779 << "Continuing anyway.";
Alex Lighteb7c1442015-08-31 13:17:42 -07006780 }
Andreas Gampefa4333d2017-02-14 11:10:34 -08006781 if (UNLIKELY(chosen_iface != nullptr)) {
Alex Light9139e002015-10-09 15:59:48 -07006782 // We have multiple default impls of the same method. This is a potential default conflict.
6783 // We need to check if this possibly conflicting method is either a superclass of the chosen
6784 // default implementation or is overridden by a non-default interface method. In either case
6785 // there is no conflict.
6786 if (!iface->IsAssignableFrom(chosen_iface.Get()) &&
6787 !ContainsOverridingMethodOf(self,
6788 target_name_comparator,
6789 iftable,
6790 k,
6791 iface,
6792 image_pointer_size_)) {
Nicolas Geoffray7f3e0db2016-01-28 09:29:31 +00006793 VLOG(class_linker) << "Conflicting default method implementations found: "
David Sehr709b0702016-10-13 09:12:37 -07006794 << current_method->PrettyMethod() << " and "
6795 << ArtMethod::PrettyMethod(*out_default_method) << " in class "
6796 << klass->PrettyClass() << " conflict.";
Alex Light9139e002015-10-09 15:59:48 -07006797 *out_default_method = nullptr;
6798 return DefaultMethodSearchResult::kDefaultConflict;
Alex Lighteb7c1442015-08-31 13:17:42 -07006799 } else {
6800 break; // Continue checking at the next interface.
6801 }
6802 } else {
Alex Light9139e002015-10-09 15:59:48 -07006803 // chosen_iface == null
6804 if (!ContainsOverridingMethodOf(self,
6805 target_name_comparator,
6806 iftable,
6807 k,
6808 iface,
6809 image_pointer_size_)) {
6810 // Don't set this as the chosen interface if something else is overriding it (because that
6811 // other interface would be potentially chosen instead if it was default). If the other
6812 // interface was abstract then we wouldn't select this interface as chosen anyway since
6813 // the abstract method masks it.
6814 *out_default_method = current_method;
6815 chosen_iface.Assign(iface.Get());
6816 // We should now finish traversing the graph to find if we have default methods that
6817 // conflict.
6818 } else {
David Sehr709b0702016-10-13 09:12:37 -07006819 VLOG(class_linker) << "A default method '" << current_method->PrettyMethod()
6820 << "' was "
6821 << "skipped because it was overridden by an abstract method in a "
6822 << "subinterface on class '" << klass->PrettyClass() << "'";
Alex Light9139e002015-10-09 15:59:48 -07006823 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006824 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006825 break;
6826 }
6827 }
Alex Light9139e002015-10-09 15:59:48 -07006828 if (*out_default_method != nullptr) {
David Sehr709b0702016-10-13 09:12:37 -07006829 VLOG(class_linker) << "Default method '" << (*out_default_method)->PrettyMethod()
6830 << "' selected "
6831 << "as the implementation for '" << target_method->PrettyMethod()
6832 << "' in '" << klass->PrettyClass() << "'";
Alex Light9139e002015-10-09 15:59:48 -07006833 return DefaultMethodSearchResult::kDefaultFound;
6834 } else {
6835 return DefaultMethodSearchResult::kAbstractFound;
6836 }
Alex Lighteb7c1442015-08-31 13:17:42 -07006837}
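
// An illustrative walk-through of the search above: if klass implements two unrelated
// interfaces I and J that both provide a default implementation of the same method, the reverse
// iftable walk records the first one it reaches as chosen_iface; when it reaches the other,
// neither interface is assignable from the chosen one and nothing overrides it, so the search
// returns DefaultMethodSearchResult::kDefaultConflict and the caller records a conflicting
// method translation for the affected vtable slot.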
6838
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006839ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006840 ArtMethod* conflict_method,
6841 ArtMethod* interface_method,
Nicolas Geoffray47213e42020-12-30 15:12:00 +00006842 ArtMethod* method) {
Andreas Gampe542451c2016-07-26 09:02:02 -07006843 ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006844 Runtime* const runtime = Runtime::Current();
6845 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006846
6847 // Create a new entry if the existing one is the shared conflict method.
Nicolas Geoffray47213e42020-12-30 15:12:00 +00006848 ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006849 ? runtime->CreateImtConflictMethod(linear_alloc)
6850 : conflict_method;
6851
6852 // Allocate a new table. Note that we will leak this table at the next conflict,
6853 // but that's a tradeoff compared to making the table fixed size.
6854 void* data = linear_alloc->Alloc(
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006855 Thread::Current(), ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table,
6856 image_pointer_size_));
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006857 if (data == nullptr) {
6858 LOG(ERROR) << "Failed to allocate conflict table";
6859 return conflict_method;
6860 }
6861 ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
6862 interface_method,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006863 method,
6864 image_pointer_size_);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006865
6866 // Do a fence to ensure threads see the data in the table before it is assigned
6867 // to the conflict method.
6868 // Note that there is a race in the presence of multiple threads and we may leak
6869 // memory from the LinearAlloc, but that's a tradeoff compared to using
6870 // atomic operations.
Orion Hodson27b96762018-03-13 16:06:57 +00006871 std::atomic_thread_fence(std::memory_order_release);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006872 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006873 return new_conflict_method;
6874}
6875
Vladimir Marko921094a2017-01-12 18:37:06 +00006876bool ClassLinker::AllocateIfTableMethodArrays(Thread* self,
6877 Handle<mirror::Class> klass,
6878 Handle<mirror::IfTable> iftable) {
6879 DCHECK(!klass->IsInterface());
6880 const bool has_superclass = klass->HasSuperClass();
6881 const bool extend_super_iftable = has_superclass;
6882 const size_t ifcount = klass->GetIfTableCount();
6883 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
6884 for (size_t i = 0; i < ifcount; ++i) {
6885 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
6886 if (num_methods > 0) {
6887 const bool is_super = i < super_ifcount;
6888 // This is an interface implemented by a super-class. Therefore we can just copy the method
6889 // array from the superclass.
6890 const bool super_interface = is_super && extend_super_iftable;
6891 ObjPtr<mirror::PointerArray> method_array;
6892 if (super_interface) {
6893 ObjPtr<mirror::IfTable> if_table = klass->GetSuperClass()->GetIfTable();
6894 DCHECK(if_table != nullptr);
6895 DCHECK(if_table->GetMethodArray(i) != nullptr);
6896 // If we are working on a super interface, try extending the existing method array.
Vladimir Marko3068d582019-05-28 16:39:29 +01006897 StackHandleScope<1u> hs(self);
6898 Handle<mirror::PointerArray> old_array = hs.NewHandle(if_table->GetMethodArray(i));
6899 method_array =
6900 ObjPtr<mirror::PointerArray>::DownCast(mirror::Object::Clone(old_array, self));
Vladimir Marko921094a2017-01-12 18:37:06 +00006901 } else {
6902 method_array = AllocPointerArray(self, num_methods);
6903 }
6904 if (UNLIKELY(method_array == nullptr)) {
6905 self->AssertPendingOOMException();
6906 return false;
6907 }
6908 iftable->SetMethodArray(i, method_array);
6909 }
6910 }
6911 return true;
6912}
6913
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006914void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6915 ArtMethod* imt_conflict_method,
6916 ArtMethod* current_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006917 /*out*/bool* new_conflict,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006918 /*out*/ArtMethod** imt_ref) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006919 // Place method in imt if entry is empty, place conflict otherwise.
6920 if (*imt_ref == unimplemented_method) {
6921 *imt_ref = current_method;
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006922 } else if (!(*imt_ref)->IsRuntimeMethod()) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006923 // If we are not a conflict and we have the same signature and name as the imt
6924 // entry, it must be that we overwrote a superclass vtable entry.
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006925 // Note that we have checked IsRuntimeMethod, as there may be multiple different
6926 // conflict methods.
Alex Lighteb7c1442015-08-31 13:17:42 -07006927 MethodNameAndSignatureComparator imt_comparator(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006928 (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
Alex Lighteb7c1442015-08-31 13:17:42 -07006929 if (imt_comparator.HasSameNameAndSignature(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006930 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006931 *imt_ref = current_method;
6932 } else {
Alex Light9139e002015-10-09 15:59:48 -07006933 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006934 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07006935 }
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006936 } else {
6937 // Place the default conflict method. Note that there may be an existing conflict
6938 // method in the IMT, but it could be one tailored to the super class, with a
6939 // specific ImtConflictTable.
6940 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006941 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07006942 }
6943}
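
// A compact restatement of the transitions implemented above:
//
//   *imt_ref == unimplemented_method                    -> current_method
//   *imt_ref is a real method, same name and signature  -> current_method (vtable override)
//   *imt_ref is a real method, different name/signature -> imt_conflict_method, *new_conflict = true
//   *imt_ref is already a runtime (conflict) method     -> imt_conflict_method, *new_conflict = true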
6944
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006945void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
David Sehr709b0702016-10-13 09:12:37 -07006946 DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
6947 DCHECK(!klass->IsTemp()) << klass->PrettyClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006948 ArtMethod* imt_data[ImTable::kSize];
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006949 Runtime* const runtime = Runtime::Current();
6950 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
6951 ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006952 std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006953 if (klass->GetIfTable() != nullptr) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006954 bool new_conflict = false;
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006955 FillIMTFromIfTable(klass->GetIfTable(),
6956 unimplemented_method,
6957 conflict_method,
6958 klass,
Andreas Gampe98ea9d92018-10-19 14:06:15 -07006959 /*create_conflict_tables=*/true,
6960 /*ignore_copied_methods=*/false,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006961 &new_conflict,
6962 &imt_data[0]);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006963 }
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006964 // Compare the IMT with the super class including the conflict methods. If they are equivalent,
6965 // we can just use the same pointer.
6966 ImTable* imt = nullptr;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006967 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006968 if (super_class != nullptr && super_class->ShouldHaveImt()) {
6969 ImTable* super_imt = super_class->GetImt(image_pointer_size_);
6970 bool same = true;
6971 for (size_t i = 0; same && i < ImTable::kSize; ++i) {
6972 ArtMethod* method = imt_data[i];
6973 ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
6974 if (method != super_method) {
6975 bool is_conflict_table = method->IsRuntimeMethod() &&
6976 method != unimplemented_method &&
6977 method != conflict_method;
6978 // Verify conflict contents.
6979 bool super_conflict_table = super_method->IsRuntimeMethod() &&
6980 super_method != unimplemented_method &&
6981 super_method != conflict_method;
6982 if (!is_conflict_table || !super_conflict_table) {
6983 same = false;
6984 } else {
6985 ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
6986 ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
6987 same = same && table1->Equals(table2, image_pointer_size_);
6988 }
6989 }
6990 }
6991 if (same) {
6992 imt = super_imt;
6993 }
6994 }
6995 if (imt == nullptr) {
6996 imt = klass->GetImt(image_pointer_size_);
6997 DCHECK(imt != nullptr);
6998 imt->Populate(imt_data, image_pointer_size_);
6999 } else {
7000 klass->SetImt(imt, image_pointer_size_);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007001 }
7002}
7003
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007004ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
7005 LinearAlloc* linear_alloc,
Andreas Gampe542451c2016-07-26 09:02:02 -07007006 PointerSize image_pointer_size) {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007007 void* data = linear_alloc->Alloc(Thread::Current(),
7008 ImtConflictTable::ComputeSize(count,
7009 image_pointer_size));
7010 return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
7011}
7012
7013ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
7014 return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
7015}
7016
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007017void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007018 ArtMethod* unimplemented_method,
7019 ArtMethod* imt_conflict_method,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007020 ObjPtr<mirror::Class> klass,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007021 bool create_conflict_tables,
7022 bool ignore_copied_methods,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007023 /*out*/bool* new_conflict,
7024 /*out*/ArtMethod** imt) {
7025 uint32_t conflict_counts[ImTable::kSize] = {};
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007026 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007027 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007028 const size_t num_virtuals = interface->NumVirtualMethods();
7029 const size_t method_array_count = if_table->GetMethodArrayCount(i);
7030 // Virtual methods can be larger than the if table methods if there are default methods.
7031 DCHECK_GE(num_virtuals, method_array_count);
7032 if (kIsDebugBuild) {
7033 if (klass->IsInterface()) {
7034 DCHECK_EQ(method_array_count, 0u);
7035 } else {
7036 DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
7037 }
7038 }
7039 if (method_array_count == 0) {
7040 continue;
7041 }
Vladimir Marko557fece2019-03-26 14:29:41 +00007042 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007043 for (size_t j = 0; j < method_array_count; ++j) {
7044 ArtMethod* implementation_method =
7045 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
7046 if (ignore_copied_methods && implementation_method->IsCopied()) {
7047 continue;
7048 }
7049 DCHECK(implementation_method != nullptr);
7050 // Miranda methods cannot be used to implement an interface method, but they are safe to put
7051 // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
7052 // or interface methods in the IMT here they will not create extra conflicts since we compare
7053 // names and signatures in SetIMTRef.
7054 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00007055 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007056
7057       // There are only conflicts if the interface methods for an IMT slot do not all have
7058       // the same implementation method; keep track of this to avoid creating a conflict table in
7059       // that case.
7060
7061 // Conflict table size for each IMT slot.
7062 ++conflict_counts[imt_index];
7063
7064 SetIMTRef(unimplemented_method,
7065 imt_conflict_method,
7066 implementation_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007067 /*out*/new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007068 /*out*/&imt[imt_index]);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007069 }
7070 }
7071
7072 if (create_conflict_tables) {
7073 // Create the conflict tables.
7074 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007075 for (size_t i = 0; i < ImTable::kSize; ++i) {
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007076 size_t conflicts = conflict_counts[i];
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007077 if (imt[i] == imt_conflict_method) {
7078 ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
7079 if (new_table != nullptr) {
7080 ArtMethod* new_conflict_method =
7081 Runtime::Current()->CreateImtConflictMethod(linear_alloc);
7082 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
7083 imt[i] = new_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007084 } else {
7085 LOG(ERROR) << "Failed to allocate conflict table";
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007086 imt[i] = imt_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007087 }
7088 } else {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007089 DCHECK_NE(imt[i], imt_conflict_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007090 }
7091 }
7092
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007093 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007094 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007095 const size_t method_array_count = if_table->GetMethodArrayCount(i);
7096 // Virtual methods can be larger than the if table methods if there are default methods.
7097 if (method_array_count == 0) {
7098 continue;
7099 }
Vladimir Marko557fece2019-03-26 14:29:41 +00007100 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007101 for (size_t j = 0; j < method_array_count; ++j) {
7102 ArtMethod* implementation_method =
7103 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
7104 if (ignore_copied_methods && implementation_method->IsCopied()) {
7105 continue;
7106 }
7107 DCHECK(implementation_method != nullptr);
7108 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00007109 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007110 if (!imt[imt_index]->IsRuntimeMethod() ||
7111 imt[imt_index] == unimplemented_method ||
7112 imt[imt_index] == imt_conflict_method) {
7113 continue;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007114 }
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007115 ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
7116 const size_t num_entries = table->NumEntries(image_pointer_size_);
7117 table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
7118 table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007119 }
7120 }
7121 }
7122}
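
// An illustrative consequence of the two passes above: when two interface methods that map to
// the same IMT slot resolve to different implementation methods, the slot ends up holding a
// freshly created runtime conflict method, and the second pass appends one
// (interface method, implementation method) pair to its ImtConflictTable for each interface
// method that maps to that slot.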
7123
Alex Lighteb7c1442015-08-31 13:17:42 -07007124// Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
7125// set.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007126static bool NotSubinterfaceOfAny(
Vladimir Marko782fb712020-12-23 12:47:31 +00007127 const HashSet<mirror::Class*>& classes,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007128 ObjPtr<mirror::Class> val)
Alex Lighteb7c1442015-08-31 13:17:42 -07007129 REQUIRES(Roles::uninterruptible_)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007130 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007131 DCHECK(val != nullptr);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007132 for (ObjPtr<mirror::Class> c : classes) {
7133 if (val->IsAssignableFrom(c)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007134 return false;
7135 }
7136 }
7137 return true;
7138}
7139
7140// Fills in and flattens the interface inheritance hierarchy.
7141//
7142// By the end of this function all interfaces in the transitive closure of to_process are added to
7143// the iftable and every interface precedes all of its sub-interfaces in this list.
7144//
7145// all I, J: Interface | I <: J implies J precedes I
7146//
7147// (note A <: B means that A is a subtype of B)
7148//
7149// This returns the total number of items in the iftable. The iftable might be resized down after
7150// this call.
7151//
7152// We order this backwards so that we do not need to reorder superclass interfaces when new
   7153// interfaces are added to a subclass's interface table.
7154//
7155// Upon entry into this function iftable is a copy of the superclass's iftable with the first
7156// super_ifcount entries filled in with the transitive closure of the interfaces of the superclass.
7157// The other entries are uninitialized. We will fill in the remaining entries in this function. The
7158// iftable must be large enough to hold all interfaces without changing its size.
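//
// Illustrative example (hypothetical interfaces, not taken from the code): given
//   interface A {}
//   interface B extends A {}
//   class C implements B {}
// the flattened iftable for C ends up ordered [..., A, B]: when B is processed, its
// super-interface A is appended first, and B itself is appended only after all of its
// super-interfaces are present.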
Vladimir Markob10668c2021-06-10 09:52:53 +01007159static size_t FillIfTable(ObjPtr<mirror::Class> klass,
Vladimir Marko782fb712020-12-23 12:47:31 +00007160 ObjPtr<mirror::ObjectArray<mirror::Class>> interfaces,
7161 ObjPtr<mirror::IfTable> iftable,
Alex Lighteb7c1442015-08-31 13:17:42 -07007162 size_t super_ifcount,
Vladimir Marko782fb712020-12-23 12:47:31 +00007163 size_t num_interfaces)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007164 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko782fb712020-12-23 12:47:31 +00007165 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
7166 // This is the set of all classes already in the iftable. Used to make checking
7167 // if a class has already been added quicker.
7168 constexpr size_t kBufferSize = 32; // 256 bytes on 64-bit architectures.
7169 mirror::Class* buffer[kBufferSize];
7170 HashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize);
Alex Lighteb7c1442015-08-31 13:17:42 -07007171 // The first super_ifcount elements are from the superclass. We note that they are already added.
7172 for (size_t i = 0; i < super_ifcount; i++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007173 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
Alex Lighteb7c1442015-08-31 13:17:42 -07007174 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
Vladimir Marko782fb712020-12-23 12:47:31 +00007175 classes_in_iftable.insert(iface.Ptr());
Alex Lighteb7c1442015-08-31 13:17:42 -07007176 }
7177 size_t filled_ifcount = super_ifcount;
Vladimir Marko782fb712020-12-23 12:47:31 +00007178 const bool have_interfaces = interfaces != nullptr;
7179 for (size_t i = 0; i != num_interfaces; ++i) {
Vladimir Markob10668c2021-06-10 09:52:53 +01007180 ObjPtr<mirror::Class> interface =
7181 have_interfaces ? interfaces->Get(i) : klass->GetDirectInterface(i);
Vladimir Marko782fb712020-12-23 12:47:31 +00007182
Alex Lighteb7c1442015-08-31 13:17:42 -07007183 // Let us call the first filled_ifcount elements of iftable the current-iface-list.
7184 // At this point in the loop current-iface-list has the invariant that:
7185 // for every pair of interfaces I,J within it:
7186 // if index_of(I) < index_of(J) then I is not a subtype of J
7187
7188 // If we have already seen this element then all of its super-interfaces must already be in the
7189 // current-iface-list so we can skip adding it.
Vladimir Marko782fb712020-12-23 12:47:31 +00007190 if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
Alex Lighteb7c1442015-08-31 13:17:42 -07007191 // We haven't seen this interface so add all of its super-interfaces onto the
7192 // current-iface-list, skipping those already on it.
7193 int32_t ifcount = interface->GetIfTableCount();
7194 for (int32_t j = 0; j < ifcount; j++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007195 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
Alex Lighteb7c1442015-08-31 13:17:42 -07007196 if (!ContainsElement(classes_in_iftable, super_interface)) {
7197 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
Vladimir Marko782fb712020-12-23 12:47:31 +00007198 classes_in_iftable.insert(super_interface.Ptr());
Alex Lighteb7c1442015-08-31 13:17:42 -07007199 iftable->SetInterface(filled_ifcount, super_interface);
7200 filled_ifcount++;
7201 }
7202 }
7203 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
7204 // Place this interface onto the current-iface-list after all of its super-interfaces.
Vladimir Marko782fb712020-12-23 12:47:31 +00007205 classes_in_iftable.insert(interface.Ptr());
Alex Lighteb7c1442015-08-31 13:17:42 -07007206 iftable->SetInterface(filled_ifcount, interface);
7207 filled_ifcount++;
7208 } else if (kIsDebugBuild) {
7209 // Check all super-interfaces are already in the list.
7210 int32_t ifcount = interface->GetIfTableCount();
7211 for (int32_t j = 0; j < ifcount; j++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007212 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
Alex Lighteb7c1442015-08-31 13:17:42 -07007213 DCHECK(ContainsElement(classes_in_iftable, super_interface))
David Sehr709b0702016-10-13 09:12:37 -07007214 << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
7215 << ", a superinterface of " << interface->PrettyClass();
Alex Lighteb7c1442015-08-31 13:17:42 -07007216 }
7217 }
7218 }
7219 if (kIsDebugBuild) {
7220 // Check that the iftable is ordered correctly.
7221 for (size_t i = 0; i < filled_ifcount; i++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007222 ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
Alex Lighteb7c1442015-08-31 13:17:42 -07007223 for (size_t j = i + 1; j < filled_ifcount; j++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007224 ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
Alex Lighteb7c1442015-08-31 13:17:42 -07007225 // !(if_a <: if_b)
7226 CHECK(!if_b->IsAssignableFrom(if_a))
David Sehr709b0702016-10-13 09:12:37 -07007227 << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
7228 << ") extends "
7229 << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
Alex Lighteb7c1442015-08-31 13:17:42 -07007230 << "interface list.";
7231 }
7232 }
7233 }
7234 return filled_ifcount;
7235}
7236
Vladimir Marko782fb712020-12-23 12:47:31 +00007237bool ClassLinker::SetupInterfaceLookupTable(Thread* self,
7238 Handle<mirror::Class> klass,
Alex Lighteb7c1442015-08-31 13:17:42 -07007239 Handle<mirror::ObjectArray<mirror::Class>> interfaces) {
7240 StackHandleScope<1> hs(self);
Mathieu Chartier6beced42016-11-15 15:51:31 -08007241 const bool has_superclass = klass->HasSuperClass();
7242 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
Andreas Gampefa4333d2017-02-14 11:10:34 -08007243 const bool have_interfaces = interfaces != nullptr;
Alex Lighteb7c1442015-08-31 13:17:42 -07007244 const size_t num_interfaces =
7245 have_interfaces ? interfaces->GetLength() : klass->NumDirectInterfaces();
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007246 if (num_interfaces == 0) {
7247 if (super_ifcount == 0) {
Mathieu Chartier6beced42016-11-15 15:51:31 -08007248 if (LIKELY(has_superclass)) {
7249 klass->SetIfTable(klass->GetSuperClass()->GetIfTable());
7250 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007251 // Class implements no interfaces.
7252 DCHECK_EQ(klass->GetIfTableCount(), 0);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007253 return true;
7254 }
Ian Rogers9bc81912012-10-11 21:43:36 -07007255    // Class implements the same interfaces as its parent; are any of these not marker interfaces?
7256 bool has_non_marker_interface = false;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007257 ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007258 for (size_t i = 0; i < super_ifcount; ++i) {
Ian Rogers9bc81912012-10-11 21:43:36 -07007259 if (super_iftable->GetMethodArrayCount(i) > 0) {
7260 has_non_marker_interface = true;
7261 break;
7262 }
7263 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007264 // Class just inherits marker interfaces from parent so recycle parent's iftable.
Ian Rogers9bc81912012-10-11 21:43:36 -07007265 if (!has_non_marker_interface) {
Ian Rogers9bc81912012-10-11 21:43:36 -07007266 klass->SetIfTable(super_iftable);
7267 return true;
7268 }
7269 }
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007270 size_t ifcount = super_ifcount + num_interfaces;
Alex Lighteb7c1442015-08-31 13:17:42 -07007271 // Check that every class being implemented is an interface.
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007272 for (size_t i = 0; i < num_interfaces; i++) {
Vladimir Markob10668c2021-06-10 09:52:53 +01007273 ObjPtr<mirror::Class> interface =
7274 have_interfaces ? interfaces->GetWithoutChecks(i) : klass->GetDirectInterface(i);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007275 DCHECK(interface != nullptr);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007276 if (UNLIKELY(!interface->IsInterface())) {
7277 std::string temp;
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07007278 ThrowIncompatibleClassChangeError(klass.Get(),
7279 "Class %s implements non-interface class %s",
David Sehr709b0702016-10-13 09:12:37 -07007280 klass->PrettyDescriptor().c_str(),
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007281 PrettyDescriptor(interface->GetDescriptor(&temp)).c_str());
7282 return false;
7283 }
7284 ifcount += interface->GetIfTableCount();
7285 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007286 // Create the interface function table.
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07007287 MutableHandle<mirror::IfTable> iftable(hs.NewHandle(AllocIfTable(self, ifcount)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08007288 if (UNLIKELY(iftable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07007289 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07007290 return false;
7291 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007292 // Fill in table with superclass's iftable.
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07007293 if (super_ifcount != 0) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007294 ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
Brian Carlstrom4b620ff2011-09-11 01:11:01 -07007295 for (size_t i = 0; i < super_ifcount; i++) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007296 ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
Ian Rogers9bc81912012-10-11 21:43:36 -07007297 iftable->SetInterface(i, super_interface);
Brian Carlstrom4b620ff2011-09-11 01:11:01 -07007298 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07007299 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007300
7301 // Note that AllowThreadSuspension is to thread suspension as pthread_testcancel is to pthread
   7302  // cancellation. That is, it will suspend if one has a pending suspend request but otherwise
7303 // doesn't really do anything.
Ian Rogers7b078e82014-09-10 14:44:24 -07007304 self->AllowThreadSuspension();
Alex Lighteb7c1442015-08-31 13:17:42 -07007305
Vladimir Markob10668c2021-06-10 09:52:53 +01007306 const size_t new_ifcount =
7307 FillIfTable(klass.Get(), interfaces.Get(), iftable.Get(), super_ifcount, num_interfaces);
Alex Lighteb7c1442015-08-31 13:17:42 -07007308
Ian Rogers7b078e82014-09-10 14:44:24 -07007309 self->AllowThreadSuspension();
Alex Lighteb7c1442015-08-31 13:17:42 -07007310
Ian Rogersb52b01a2012-01-12 17:01:38 -08007311 // Shrink iftable in case duplicates were found
Alex Lighteb7c1442015-08-31 13:17:42 -07007312 if (new_ifcount < ifcount) {
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07007313 DCHECK_NE(num_interfaces, 0U);
Vladimir Markobcf17522018-06-01 13:14:32 +01007314 iftable.Assign(ObjPtr<mirror::IfTable>::DownCast(
Vladimir Marko3068d582019-05-28 16:39:29 +01007315 mirror::IfTable::CopyOf(iftable, self, new_ifcount * mirror::IfTable::kMax)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08007316 if (UNLIKELY(iftable == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07007317 self->AssertPendingOOMException();
Ian Rogersa436fde2013-08-27 23:34:06 -07007318 return false;
7319 }
Alex Lighteb7c1442015-08-31 13:17:42 -07007320 ifcount = new_ifcount;
Ian Rogersb52b01a2012-01-12 17:01:38 -08007321 } else {
Alex Lighteb7c1442015-08-31 13:17:42 -07007322 DCHECK_EQ(new_ifcount, ifcount);
Ian Rogersb52b01a2012-01-12 17:01:38 -08007323 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07007324 klass->SetIfTable(iftable.Get());
Alex Lighteb7c1442015-08-31 13:17:42 -07007325 return true;
7326}
7327
Alex Light1f3925d2016-09-07 12:04:20 -07007328// Finds the method with a name/signature that matches cmp in the given lists of methods. The
   7329// lists of methods must not contain duplicates.
7330static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp ATTRIBUTE_UNUSED) {
7331 return nullptr;
7332}
7333
7334template <typename ... Types>
Alex Light9139e002015-10-09 15:59:48 -07007335static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp,
Alex Light1f3925d2016-09-07 12:04:20 -07007336 const ScopedArenaVector<ArtMethod*>& list,
7337 const Types& ... rest)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007338 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07007339 for (ArtMethod* method : list) {
7340 if (cmp.HasSameNameAndSignature(method)) {
7341 return method;
7342 }
7343 }
Alex Light1f3925d2016-09-07 12:04:20 -07007344 return FindSameNameAndSignature(cmp, rest...);
Alex Light9139e002015-10-09 15:59:48 -07007345}
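// The variadic overload above scans each ScopedArenaVector argument in turn and recurses on the
// remaining lists; the overload taking no lists terminates the recursion by returning nullptr.
// For example, the call used later in this file,
//   FindSameNameAndSignature(cmp, default_methods_, overriding_default_methods_),
// searches default_methods_ first, then overriding_default_methods_, and yields nullptr if
// neither contains a method whose name and signature match cmp.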
7346
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007347namespace {
7348
Alex Light1f3925d2016-09-07 12:04:20 -07007349// Check that all vtable entries are present in this class's virtuals or are the same as a
   7350// superclass's vtable entry.
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007351void CheckClassOwnsVTableEntries(Thread* self,
7352 Handle<mirror::Class> klass,
7353 PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07007354 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light1f3925d2016-09-07 12:04:20 -07007355 StackHandleScope<2> hs(self);
7356 Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007357 ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
Alex Light1f3925d2016-09-07 12:04:20 -07007358 Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
Andreas Gampefa4333d2017-02-14 11:10:34 -08007359 int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
Alex Lighte64300b2015-12-15 15:02:47 -08007360 for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
7361 ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7362 CHECK(m != nullptr);
7363
Alex Lighta41a30782017-03-29 11:33:19 -07007364 if (m->GetMethodIndexDuringLinking() != i) {
7365 LOG(WARNING) << m->PrettyMethod()
   7366                   << " has an unexpected method index for its spot in the vtable for class "
7367 << klass->PrettyClass();
7368 }
Alex Lighte64300b2015-12-15 15:02:47 -08007369 ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
7370 auto is_same_method = [m] (const ArtMethod& meth) {
7371 return &meth == m;
7372 };
Alex Light3f980532017-03-17 15:10:32 -07007373 if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
7374 std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
7375 LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
7376 << klass->PrettyClass() << " or any of its superclasses!";
7377 }
Alex Lighte64300b2015-12-15 15:02:47 -08007378 }
7379}
7380
Alex Light1f3925d2016-09-07 12:04:20 -07007381// Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
7382// method is overridden in a subclass.
Andreas Gampea2fed082019-02-01 09:34:43 -08007383template <PointerSize kPointerSize>
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007384void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
Alex Light1f3925d2016-09-07 12:04:20 -07007385 REQUIRES_SHARED(Locks::mutator_lock_) {
7386 StackHandleScope<1> hs(self);
7387 Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7388 int32_t num_entries = vtable->GetLength();
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007389
7390 // Observations:
7391 // * The older implementation was O(n^2) and got too expensive for apps with larger classes.
7392 // * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
7393 // for many classes outside of libcore a cross-dexfile check has to be run anyways.
7394 // * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
7395 // to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
7396 // * The single-pass algorithm will trade memory for speed, but that is OK.
7397
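  // Strategy summary: a first pass assumes all vtable entries come from a single dex file and
  // detects duplicates by (name_idx, proto_idx) pairs; if an entry from a different dex file is
  // encountered, we fall back to the cross-dex pass below, which hashes the method name and
  // signature instead.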
7398 CHECK_GT(num_entries, 0);
7399
7400 auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
7401 ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
7402 ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7403 LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
7404 << klass->PrettyClass() << " in method " << m1->PrettyMethod()
   7405                 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m1) << ") and "
7406 << m2->PrettyMethod() << " (0x" << std::hex
7407 << reinterpret_cast<uintptr_t>(m2) << ")";
7408 };
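  // Note: HashCombine follows the widely used boost-style hash_combine recipe; the constant
  // 0x9e3779b9 is the 32-bit golden-ratio constant, used here to spread the bits of the
  // combined values.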
7409 struct BaseHashType {
7410 static size_t HashCombine(size_t seed, size_t val) {
7411 return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7412 }
7413 };
7414
7415 // Check assuming all entries come from the same dex file.
7416 {
7417 // Find the first interesting method and its dex file.
7418 int32_t start = 0;
7419 for (; start < num_entries; ++start) {
7420 ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
   7421      // Don't bother if we cannot 'see' the vtable entry (i.e. it may be a package-private
   7422      // member).
7423 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7424 vtable_entry->GetAccessFlags())) {
7425 continue;
7426 }
7427 break;
7428 }
7429 if (start == num_entries) {
7430 return;
7431 }
7432 const DexFile* dex_file =
7433 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7434 GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7435
7436 // Helper function to avoid logging if we have to run the cross-file checks.
7437 auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7438 // Use a map to store seen entries, as the storage space is too large for a bitvector.
7439 using PairType = std::pair<uint32_t, uint16_t>;
7440 struct PairHash : BaseHashType {
7441 size_t operator()(const PairType& key) const {
7442 return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7443 }
7444 };
Vladimir Marko782fb712020-12-23 12:47:31 +00007445 HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007446 seen.reserve(2 * num_entries);
7447 bool need_slow_path = false;
7448 bool found_dup = false;
7449 for (int i = start; i < num_entries; ++i) {
7450 // Can use Unchecked here as the start loop already ensured that the arrays are correct
7451 // wrt/ kPointerSize.
7452 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7453 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7454 vtable_entry->GetAccessFlags())) {
7455 continue;
7456 }
7457 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7458 if (dex_file != m->GetDexFile()) {
7459 need_slow_path = true;
7460 break;
7461 }
7462 const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7463 PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7464 auto it = seen.find(pair);
7465 if (it != seen.end()) {
7466 found_dup = true;
7467 if (log_warn) {
7468 log_fn(it->second, i);
7469 }
7470 } else {
Vladimir Marko782fb712020-12-23 12:47:31 +00007471 seen.insert(std::make_pair(pair, i));
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007472 }
7473 }
7474 return std::make_pair(need_slow_path, found_dup);
7475 };
7476 std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7477 if (!result.first) {
7478 if (result.second) {
7479 check_fn(/* log_warn= */ true);
7480 }
7481 return;
7482 }
7483 }
7484
7485 // Need to check across dex files.
7486 struct Entry {
7487 size_t cached_hash = 0;
Vladimir Markoaa027b82021-01-06 20:34:20 +00007488 uint32_t name_len = 0;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007489 const char* name = nullptr;
7490 Signature signature = Signature::NoSignature();
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007491
Vladimir Marko782fb712020-12-23 12:47:31 +00007492 Entry() = default;
7493 Entry(const Entry& other) = default;
7494 Entry& operator=(const Entry& other) = default;
7495
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007496 Entry(const DexFile* dex_file, const dex::MethodId& mid)
Vladimir Markoaa027b82021-01-06 20:34:20 +00007497 : name_len(0), // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
7498 // This call writes `name_len` and it is therefore necessary that the
7499 // initializer for `name_len` comes before it, otherwise the value
7500 // from the call would be overwritten by that initializer.
7501 name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007502 signature(dex_file->GetMethodSignature(mid)) {
Vladimir Markoaa027b82021-01-06 20:34:20 +00007503 // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
7504 if (name[name_len] != 0) {
7505 name_len += strlen(name + name_len);
7506 }
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007507 }
7508
7509 bool operator==(const Entry& other) const {
Vladimir Marko782fb712020-12-23 12:47:31 +00007510 return name_len == other.name_len &&
7511 memcmp(name, other.name, name_len) == 0 &&
7512 signature == other.signature;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007513 }
7514 };
7515 struct EntryHash {
7516 size_t operator()(const Entry& key) const {
7517 return key.cached_hash;
7518 }
7519 };
Vladimir Marko782fb712020-12-23 12:47:31 +00007520 HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007521 for (int32_t i = 0; i < num_entries; ++i) {
7522 // Can use Unchecked here as the first loop already ensured that the arrays are correct
7523 // wrt/ kPointerSize.
7524 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
   7525    // Don't bother if we cannot 'see' the vtable entry (i.e. it may be a package-private
   7526    // member).
Alex Light1f3925d2016-09-07 12:04:20 -07007527 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7528 vtable_entry->GetAccessFlags())) {
7529 continue;
7530 }
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007531 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7532 const DexFile* dex_file = m->GetDexFile();
7533 const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7534
7535 Entry e(dex_file, mid);
7536
7537 size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7538 size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7539 e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7540 sig_hash);
7541
7542 auto it = map.find(e);
7543 if (it != map.end()) {
7544 log_fn(it->second, i);
7545 } else {
Vladimir Marko782fb712020-12-23 12:47:31 +00007546 map.insert(std::make_pair(e, i));
Alex Light1f3925d2016-09-07 12:04:20 -07007547 }
7548 }
7549}
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007550
7551void CheckVTableHasNoDuplicates(Thread* self,
7552 Handle<mirror::Class> klass,
7553 PointerSize pointer_size)
Andreas Gampea2fed082019-02-01 09:34:43 -08007554 REQUIRES_SHARED(Locks::mutator_lock_) {
7555 switch (pointer_size) {
7556 case PointerSize::k64:
7557 CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7558 break;
7559 case PointerSize::k32:
7560 CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7561 break;
7562 }
7563}
Alex Light1f3925d2016-09-07 12:04:20 -07007564
Orion Hodson5880c772020-07-28 20:12:08 +01007565static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
Alex Light1f3925d2016-09-07 12:04:20 -07007566 REQUIRES_SHARED(Locks::mutator_lock_) {
7567 CheckClassOwnsVTableEntries(self, klass, pointer_size);
7568 CheckVTableHasNoDuplicates(self, klass, pointer_size);
7569}
7570
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007571} // namespace
7572
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007573void ClassLinker::FillImtFromSuperClass(Handle<mirror::Class> klass,
7574 ArtMethod* unimplemented_method,
7575 ArtMethod* imt_conflict_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007576 bool* new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007577 ArtMethod** imt) {
Alex Light705ad492015-09-21 11:36:30 -07007578 DCHECK(klass->HasSuperClass());
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007579 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007580 if (super_class->ShouldHaveImt()) {
7581 ImTable* super_imt = super_class->GetImt(image_pointer_size_);
7582 for (size_t i = 0; i < ImTable::kSize; ++i) {
7583 imt[i] = super_imt->Get(i, image_pointer_size_);
Alex Light705ad492015-09-21 11:36:30 -07007584 }
7585 } else {
7586 // No imt in the super class, need to reconstruct from the iftable.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07007587 ObjPtr<mirror::IfTable> if_table = super_class->GetIfTable();
Mathieu Chartier6beced42016-11-15 15:51:31 -08007588 if (if_table->Count() != 0) {
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07007589 // Ignore copied methods since we will handle these in LinkInterfaceMethods.
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007590 FillIMTFromIfTable(if_table,
7591 unimplemented_method,
7592 imt_conflict_method,
7593 klass.Get(),
Andreas Gampe98ea9d92018-10-19 14:06:15 -07007594 /*create_conflict_tables=*/false,
7595 /*ignore_copied_methods=*/true,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00007596 /*out*/new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07007597 /*out*/imt);
Alex Light705ad492015-09-21 11:36:30 -07007598 }
7599 }
7600}
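// Note: FillImtFromSuperClass above takes one of two paths: if the superclass already has an
// ImTable, its entries are copied directly; otherwise the IMT is reconstructed from the
// superclass's iftable via FillIMTFromIfTable, ignoring copied methods, which are handled later
// in LinkInterfaceMethods.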
7601
Vladimir Marko921094a2017-01-12 18:37:06 +00007602class ClassLinker::LinkInterfaceMethodsHelper {
7603 public:
7604 LinkInterfaceMethodsHelper(ClassLinker* class_linker,
7605 Handle<mirror::Class> klass,
7606 Thread* self,
7607 Runtime* runtime)
7608 : class_linker_(class_linker),
7609 klass_(klass),
7610 method_alignment_(ArtMethod::Alignment(class_linker->GetImagePointerSize())),
7611 method_size_(ArtMethod::Size(class_linker->GetImagePointerSize())),
7612 self_(self),
7613 stack_(runtime->GetLinearAlloc()->GetArenaPool()),
7614 allocator_(&stack_),
7615 default_conflict_methods_(allocator_.Adapter()),
7616 overriding_default_conflict_methods_(allocator_.Adapter()),
7617 miranda_methods_(allocator_.Adapter()),
7618 default_methods_(allocator_.Adapter()),
7619 overriding_default_methods_(allocator_.Adapter()),
7620 move_table_(allocator_.Adapter()) {
7621 }
7622
7623 ArtMethod* FindMethod(ArtMethod* interface_method,
7624 MethodNameAndSignatureComparator& interface_name_comparator,
7625 ArtMethod* vtable_impl)
7626 REQUIRES_SHARED(Locks::mutator_lock_);
7627
7628 ArtMethod* GetOrCreateMirandaMethod(ArtMethod* interface_method,
7629 MethodNameAndSignatureComparator& interface_name_comparator)
7630 REQUIRES_SHARED(Locks::mutator_lock_);
7631
7632 bool HasNewVirtuals() const {
7633 return !(miranda_methods_.empty() &&
7634 default_methods_.empty() &&
7635 overriding_default_methods_.empty() &&
7636 overriding_default_conflict_methods_.empty() &&
7637 default_conflict_methods_.empty());
7638 }
7639
7640 void ReallocMethods() REQUIRES_SHARED(Locks::mutator_lock_);
7641
7642 ObjPtr<mirror::PointerArray> UpdateVtable(
Vladimir Marko782fb712020-12-23 12:47:31 +00007643 const HashMap<size_t, ClassLinker::MethodTranslation>& default_translations,
Vladimir Marko3068d582019-05-28 16:39:29 +01007644 Handle<mirror::PointerArray> old_vtable) REQUIRES_SHARED(Locks::mutator_lock_);
Vladimir Marko921094a2017-01-12 18:37:06 +00007645
7646 void UpdateIfTable(Handle<mirror::IfTable> iftable) REQUIRES_SHARED(Locks::mutator_lock_);
7647
7648 void UpdateIMT(ArtMethod** out_imt);
7649
7650 void CheckNoStaleMethodsInDexCache() REQUIRES_SHARED(Locks::mutator_lock_) {
7651 if (kIsDebugBuild) {
7652 PointerSize pointer_size = class_linker_->GetImagePointerSize();
   7653      // Check that there are no stale methods in the dex cache array.
7654 auto* resolved_methods = klass_->GetDexCache()->GetResolvedMethods();
7655 for (size_t i = 0, count = klass_->GetDexCache()->NumResolvedMethods(); i < count; ++i) {
David Srbecky5de5efe2021-02-15 21:23:00 +00007656 auto pair = mirror::DexCache::GetNativePair(resolved_methods, i);
Vladimir Marko07bfbac2017-07-06 14:55:02 +01007657 ArtMethod* m = pair.object;
Vladimir Marko921094a2017-01-12 18:37:06 +00007658 CHECK(move_table_.find(m) == move_table_.end() ||
7659 // The original versions of copied methods will still be present so allow those too.
7660 // Note that if the first check passes this might fail to GetDeclaringClass().
7661 std::find_if(m->GetDeclaringClass()->GetMethods(pointer_size).begin(),
7662 m->GetDeclaringClass()->GetMethods(pointer_size).end(),
7663 [m] (ArtMethod& meth) {
7664 return &meth == m;
7665 }) != m->GetDeclaringClass()->GetMethods(pointer_size).end())
7666 << "Obsolete method " << m->PrettyMethod() << " is in dex cache!";
7667 }
7668 }
7669 }
7670
7671 void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
7672 LengthPrefixedArray<ArtMethod>* methods) {
7673 if (kIsDebugBuild) {
7674 CHECK(methods != nullptr);
7675 // Put some random garbage in old methods to help find stale pointers.
7676 if (methods != old_methods && old_methods != nullptr) {
7677 // Need to make sure the GC is not running since it could be scanning the methods we are
7678 // about to overwrite.
7679 ScopedThreadStateChange tsc(self_, kSuspended);
7680 gc::ScopedGCCriticalSection gcs(self_,
7681 gc::kGcCauseClassLinker,
7682 gc::kCollectorTypeClassLinker);
7683 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
7684 method_size_,
7685 method_alignment_);
7686 memset(old_methods, 0xFEu, old_size);
7687 }
7688 }
7689 }
7690
7691 private:
7692 size_t NumberOfNewVirtuals() const {
7693 return miranda_methods_.size() +
7694 default_methods_.size() +
7695 overriding_default_conflict_methods_.size() +
7696 overriding_default_methods_.size() +
7697 default_conflict_methods_.size();
7698 }
7699
7700 bool FillTables() REQUIRES_SHARED(Locks::mutator_lock_) {
7701 return !klass_->IsInterface();
7702 }
7703
7704 void LogNewVirtuals() const REQUIRES_SHARED(Locks::mutator_lock_) {
7705 DCHECK(!klass_->IsInterface() || (default_methods_.empty() && miranda_methods_.empty()))
7706 << "Interfaces should only have default-conflict methods appended to them.";
7707 VLOG(class_linker) << mirror::Class::PrettyClass(klass_.Get()) << ": miranda_methods="
7708 << miranda_methods_.size()
7709 << " default_methods=" << default_methods_.size()
7710 << " overriding_default_methods=" << overriding_default_methods_.size()
7711 << " default_conflict_methods=" << default_conflict_methods_.size()
7712 << " overriding_default_conflict_methods="
7713 << overriding_default_conflict_methods_.size();
7714 }
7715
7716 ClassLinker* class_linker_;
7717 Handle<mirror::Class> klass_;
7718 size_t method_alignment_;
7719 size_t method_size_;
7720 Thread* const self_;
7721
   7722  // These are allocated on the heap to begin with; we then transfer them to linear alloc when we
   7723  // re-create the virtual methods array.
   7724  // Need to use low-4GB arenas for the compiler or else the pointers won't fit in the 32-bit
   7725  // method array during cross compilation.
   7726  // Use the linear alloc pool since this one is in the low 4GB for the compiler.
7727 ArenaStack stack_;
7728 ScopedArenaAllocator allocator_;
7729
7730 ScopedArenaVector<ArtMethod*> default_conflict_methods_;
7731 ScopedArenaVector<ArtMethod*> overriding_default_conflict_methods_;
7732 ScopedArenaVector<ArtMethod*> miranda_methods_;
7733 ScopedArenaVector<ArtMethod*> default_methods_;
7734 ScopedArenaVector<ArtMethod*> overriding_default_methods_;
7735
7736 ScopedArenaUnorderedMap<ArtMethod*, ArtMethod*> move_table_;
7737};
7738
7739ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::FindMethod(
7740 ArtMethod* interface_method,
7741 MethodNameAndSignatureComparator& interface_name_comparator,
7742 ArtMethod* vtable_impl) {
7743 ArtMethod* current_method = nullptr;
7744 switch (class_linker_->FindDefaultMethodImplementation(self_,
7745 interface_method,
7746 klass_,
7747 /*out*/&current_method)) {
7748 case DefaultMethodSearchResult::kDefaultConflict: {
7749 // Default method conflict.
7750 DCHECK(current_method == nullptr);
7751 ArtMethod* default_conflict_method = nullptr;
7752 if (vtable_impl != nullptr && vtable_impl->IsDefaultConflicting()) {
   7753        // We can reuse the method from the superclass; don't bother adding it to virtuals.
7754 default_conflict_method = vtable_impl;
7755 } else {
7756 // See if we already have a conflict method for this method.
7757 ArtMethod* preexisting_conflict = FindSameNameAndSignature(
7758 interface_name_comparator,
7759 default_conflict_methods_,
7760 overriding_default_conflict_methods_);
7761 if (LIKELY(preexisting_conflict != nullptr)) {
7762 // We already have another conflict we can reuse.
7763 default_conflict_method = preexisting_conflict;
7764 } else {
   7765        // Note that we do this even if we are an interface, since we need to create this and
   7766        // cannot reuse another class's conflict method.
7767 // Create a new conflict method for this to use.
7768 default_conflict_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
7769 new(default_conflict_method) ArtMethod(interface_method,
7770 class_linker_->GetImagePointerSize());
7771 if (vtable_impl == nullptr) {
7772 // Save the conflict method. We need to add it to the vtable.
7773 default_conflict_methods_.push_back(default_conflict_method);
7774 } else {
7775 // Save the conflict method but it is already in the vtable.
7776 overriding_default_conflict_methods_.push_back(default_conflict_method);
7777 }
7778 }
7779 }
7780 current_method = default_conflict_method;
7781 break;
7782 } // case kDefaultConflict
7783 case DefaultMethodSearchResult::kDefaultFound: {
7784 DCHECK(current_method != nullptr);
7785 // Found a default method.
7786 if (vtable_impl != nullptr &&
7787 current_method->GetDeclaringClass() == vtable_impl->GetDeclaringClass()) {
7788 // We found a default method but it was the same one we already have from our
7789 // superclass. Don't bother adding it to our vtable again.
7790 current_method = vtable_impl;
7791 } else if (LIKELY(FillTables())) {
7792 // Interfaces don't need to copy default methods since they don't have vtables.
7793 // Only record this default method if it is new to save space.
7794 // TODO It might be worthwhile to copy default methods on interfaces anyway since it
7795 // would make lookup for interface super much faster. (We would only need to scan
7796 // the iftable to find if there is a NSME or AME.)
7797 ArtMethod* old = FindSameNameAndSignature(interface_name_comparator,
7798 default_methods_,
7799 overriding_default_methods_);
7800 if (old == nullptr) {
7801 // We found a default method implementation and there were no conflicts.
7802 if (vtable_impl == nullptr) {
7803 // Save the default method. We need to add it to the vtable.
7804 default_methods_.push_back(current_method);
7805 } else {
7806 // Save the default method but it is already in the vtable.
7807 overriding_default_methods_.push_back(current_method);
7808 }
7809 } else {
7810 CHECK(old == current_method) << "Multiple default implementations selected!";
7811 }
7812 }
7813 break;
7814 } // case kDefaultFound
7815 case DefaultMethodSearchResult::kAbstractFound: {
7816 DCHECK(current_method == nullptr);
7817 // Abstract method masks all defaults.
7818 if (vtable_impl != nullptr &&
7819 vtable_impl->IsAbstract() &&
7820 !vtable_impl->IsDefaultConflicting()) {
7821 // We need to make this an abstract method but the version in the vtable already is so
7822 // don't do anything.
7823 current_method = vtable_impl;
7824 }
7825 break;
7826 } // case kAbstractFound
7827 }
7828 return current_method;
7829}
7830
7831ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::GetOrCreateMirandaMethod(
7832 ArtMethod* interface_method,
7833 MethodNameAndSignatureComparator& interface_name_comparator) {
7834 // Find out if there is already a miranda method we can use.
7835 ArtMethod* miranda_method = FindSameNameAndSignature(interface_name_comparator,
7836 miranda_methods_);
7837 if (miranda_method == nullptr) {
7838 DCHECK(interface_method->IsAbstract()) << interface_method->PrettyMethod();
7839 miranda_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
7840 CHECK(miranda_method != nullptr);
7841 // Point the interface table at a phantom slot.
7842 new(miranda_method) ArtMethod(interface_method, class_linker_->GetImagePointerSize());
7843 miranda_methods_.push_back(miranda_method);
7844 }
7845 return miranda_method;
7846}
7847
7848void ClassLinker::LinkInterfaceMethodsHelper::ReallocMethods() {
7849 LogNewVirtuals();
7850
7851 const size_t old_method_count = klass_->NumMethods();
7852 const size_t new_method_count = old_method_count + NumberOfNewVirtuals();
7853 DCHECK_NE(old_method_count, new_method_count);
7854
7855 // Attempt to realloc to save RAM if possible.
7856 LengthPrefixedArray<ArtMethod>* old_methods = klass_->GetMethodsPtr();
7857 // The Realloced virtual methods aren't visible from the class roots, so there is no issue
7858 // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
7859 // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
7860 // CopyFrom has internal read barriers.
7861 //
7862 // TODO We should maybe move some of this into mirror::Class or at least into another method.
7863 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
7864 method_size_,
7865 method_alignment_);
7866 const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
7867 method_size_,
7868 method_alignment_);
7869 const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
7870 auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
Nicolas Geoffray48b40cc2017-08-07 16:52:40 +01007871 class_linker_->GetAllocatorForClassLoader(klass_->GetClassLoader())->Realloc(
Vladimir Marko921094a2017-01-12 18:37:06 +00007872 self_, old_methods, old_methods_ptr_size, new_size));
7873 CHECK(methods != nullptr); // Native allocation failure aborts.
7874
7875 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7876 if (methods != old_methods) {
7877 // Maps from heap allocated miranda method to linear alloc miranda method.
7878 StrideIterator<ArtMethod> out = methods->begin(method_size_, method_alignment_);
7879 // Copy over the old methods.
7880 for (auto& m : klass_->GetMethods(pointer_size)) {
7881 move_table_.emplace(&m, &*out);
7882 // The CopyFrom is only necessary to not miss read barriers since Realloc won't do read
7883 // barriers when it copies.
7884 out->CopyFrom(&m, pointer_size);
7885 ++out;
7886 }
7887 }
7888 StrideIterator<ArtMethod> out(methods->begin(method_size_, method_alignment_) + old_method_count);
7889 // Copy over miranda methods before copying vtable since CopyOf may cause thread suspension and
7890 // we want the roots of the miranda methods to get visited.
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007891 for (size_t i = 0; i < miranda_methods_.size(); ++i) {
7892 ArtMethod* mir_method = miranda_methods_[i];
Vladimir Marko921094a2017-01-12 18:37:06 +00007893 ArtMethod& new_method = *out;
7894 new_method.CopyFrom(mir_method, pointer_size);
Vladimir Markode0d0de2021-03-18 14:12:35 +00007895 uint32_t access_flags = new_method.GetAccessFlags();
7896 DCHECK_EQ(access_flags & kAccIntrinsic, 0u) << "Miranda method should not be an intrinsic!";
7897 DCHECK_EQ(access_flags & kAccDefault, 0u) << "Miranda method should not be a default method!";
7898 DCHECK_NE(access_flags & kAccAbstract, 0u) << "Miranda method should be abstract!";
7899 new_method.SetAccessFlags(access_flags | kAccCopied);
Vladimir Marko921094a2017-01-12 18:37:06 +00007900 move_table_.emplace(mir_method, &new_method);
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007901 // Update the entry in the method array, as the array will be used for future lookups,
7902 // where thread suspension is allowed.
7903 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
7904 // would not see them.
7905 miranda_methods_[i] = &new_method;
Vladimir Marko921094a2017-01-12 18:37:06 +00007906 ++out;
7907 }
7908 // We need to copy the default methods into our own method table since the runtime requires that
7909 // every method on a class's vtable be in that respective class's virtual method table.
7910 // NOTE This means that two classes might have the same implementation of a method from the same
7911 // interface but will have different ArtMethod*s for them. This also means we cannot compare a
7912 // default method found on a class with one found on the declaring interface directly and must
7913 // look at the declaring class to determine if they are the same.
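  // Illustrative consequence (hypothetical classes A and B, interface I): if A and B both inherit
  // the same default method I.m(), each class gets its own copied ArtMethod for m(), so comparing
  // the two requires looking at the declaring class rather than comparing ArtMethod pointers
  // directly.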
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007914 for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_methods_,
7915 &overriding_default_methods_}) {
7916 for (size_t i = 0; i < methods_vec->size(); ++i) {
7917 ArtMethod* def_method = (*methods_vec)[i];
Vladimir Marko921094a2017-01-12 18:37:06 +00007918 ArtMethod& new_method = *out;
7919 new_method.CopyFrom(def_method, pointer_size);
7920 // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
7921 // verified yet it shouldn't have methods that are skipping access checks.
   7922      // verified yet, it shouldn't have methods that are skipping access checks.
7923 // methods are skip_access_checks.
Vladimir Markob0a6aee2017-10-27 10:34:04 +01007924 DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
Vladimir Marko921094a2017-01-12 18:37:06 +00007925 constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
7926 constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
7927 new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
7928 move_table_.emplace(def_method, &new_method);
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007929 // Update the entry in the method array, as the array will be used for future lookups,
7930 // where thread suspension is allowed.
7931 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
7932 // would not see them.
7933 (*methods_vec)[i] = &new_method;
Vladimir Marko921094a2017-01-12 18:37:06 +00007934 ++out;
7935 }
7936 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007937 for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_conflict_methods_,
7938 &overriding_default_conflict_methods_}) {
7939 for (size_t i = 0; i < methods_vec->size(); ++i) {
7940 ArtMethod* conf_method = (*methods_vec)[i];
Vladimir Marko921094a2017-01-12 18:37:06 +00007941 ArtMethod& new_method = *out;
7942 new_method.CopyFrom(conf_method, pointer_size);
7943 // This is a type of default method (there are default method impls, just a conflict) so
Vladimir Markode0d0de2021-03-18 14:12:35 +00007944      // mark this as a default. We use the `kAccAbstract` flag to distinguish it from an invokable
   7945      // copied default method without using a separate access flag, but the default conflicting
   7946      // method is technically not abstract and ArtMethod::IsAbstract() shall return false.
   7947      // Also clear the kAccSkipAccessChecks bit since this class hasn't been verified yet, so it
   7948      // shouldn't have methods that are skipping access checks. Also clear potential
7949 // kAccSingleImplementation to avoid CHA trying to inline the default method.
7950 uint32_t access_flags = new_method.GetAccessFlags();
7951 DCHECK_EQ(access_flags & kAccNative, 0u);
7952 DCHECK_EQ(access_flags & kAccIntrinsic, 0u);
7953 constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
7954 constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
7955 new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
Vladimir Marko921094a2017-01-12 18:37:06 +00007956 DCHECK(new_method.IsDefaultConflicting());
Vladimir Markode0d0de2021-03-18 14:12:35 +00007957 DCHECK(!new_method.IsAbstract());
Vladimir Marko921094a2017-01-12 18:37:06 +00007958 // The actual method might or might not be marked abstract since we just copied it from a
   7959      // (possibly default) interface method. We need to set its entry point to be the bridge so
7960 // that the compiler will not invoke the implementation of whatever method we copied from.
7961 EnsureThrowsInvocationError(class_linker_, &new_method);
7962 move_table_.emplace(conf_method, &new_method);
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00007963 // Update the entry in the method array, as the array will be used for future lookups,
7964 // where thread suspension is allowed.
7965 // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
7966 // would not see them.
7967 (*methods_vec)[i] = &new_method;
Vladimir Marko921094a2017-01-12 18:37:06 +00007968 ++out;
7969 }
7970 }
7971 methods->SetSize(new_method_count);
7972 class_linker_->UpdateClassMethods(klass_.Get(), methods);
7973}
7974
7975ObjPtr<mirror::PointerArray> ClassLinker::LinkInterfaceMethodsHelper::UpdateVtable(
Vladimir Marko782fb712020-12-23 12:47:31 +00007976 const HashMap<size_t, ClassLinker::MethodTranslation>& default_translations,
Vladimir Marko3068d582019-05-28 16:39:29 +01007977 Handle<mirror::PointerArray> old_vtable) {
Vladimir Marko921094a2017-01-12 18:37:06 +00007978 // Update the vtable to the new method structures. We can skip this for interfaces since they
7979 // do not have vtables.
7980 const size_t old_vtable_count = old_vtable->GetLength();
7981 const size_t new_vtable_count = old_vtable_count +
7982 miranda_methods_.size() +
7983 default_methods_.size() +
7984 default_conflict_methods_.size();
7985
Vladimir Marko3068d582019-05-28 16:39:29 +01007986 ObjPtr<mirror::PointerArray> vtable = ObjPtr<mirror::PointerArray>::DownCast(
7987 mirror::Array::CopyOf(old_vtable, self_, new_vtable_count));
Vladimir Marko921094a2017-01-12 18:37:06 +00007988 if (UNLIKELY(vtable == nullptr)) {
7989 self_->AssertPendingOOMException();
7990 return nullptr;
7991 }
7992
7993 size_t vtable_pos = old_vtable_count;
7994 PointerSize pointer_size = class_linker_->GetImagePointerSize();
7995 // Update all the newly copied method's indexes so they denote their placement in the vtable.
7996 for (const ScopedArenaVector<ArtMethod*>& methods_vec : {default_methods_,
7997 default_conflict_methods_,
7998 miranda_methods_}) {
7999 // These are the functions that are not already in the vtable!
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008000 for (ArtMethod* new_vtable_method : methods_vec) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008001      // Leave the declaring class alone; the method's dex_code_item_offset_ and dex_method_index_
   8002      // fields are references into the dex file the method was defined in. Since the ArtMethod
   8003      // does not store that information, it uses declaring_class_->dex_cache_.
8004 new_vtable_method->SetMethodIndex(0xFFFF & vtable_pos);
8005 vtable->SetElementPtrSize(vtable_pos, new_vtable_method, pointer_size);
8006 ++vtable_pos;
8007 }
8008 }
8009 DCHECK_EQ(vtable_pos, new_vtable_count);
8010
8011 // Update old vtable methods. We use the default_translations map to figure out what each
8012 // vtable entry should be updated to, if they need to be at all.
8013 for (size_t i = 0; i < old_vtable_count; ++i) {
8014 ArtMethod* translated_method = vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
8015 // Try and find what we need to change this method to.
8016 auto translation_it = default_translations.find(i);
Vladimir Marko921094a2017-01-12 18:37:06 +00008017 if (translation_it != default_translations.end()) {
8018 if (translation_it->second.IsInConflict()) {
8019 // Find which conflict method we are to use for this method.
8020 MethodNameAndSignatureComparator old_method_comparator(
8021 translated_method->GetInterfaceMethodIfProxy(pointer_size));
8022 // We only need to look through overriding_default_conflict_methods since this is an
8023 // overridden method we are fixing up here.
8024 ArtMethod* new_conflict_method = FindSameNameAndSignature(
8025 old_method_comparator, overriding_default_conflict_methods_);
8026 CHECK(new_conflict_method != nullptr) << "Expected a conflict method!";
8027 translated_method = new_conflict_method;
8028 } else if (translation_it->second.IsAbstract()) {
8029 // Find which miranda method we are to use for this method.
8030 MethodNameAndSignatureComparator old_method_comparator(
8031 translated_method->GetInterfaceMethodIfProxy(pointer_size));
8032 ArtMethod* miranda_method = FindSameNameAndSignature(old_method_comparator,
8033 miranda_methods_);
8034 DCHECK(miranda_method != nullptr);
8035 translated_method = miranda_method;
8036 } else {
8037 // Normal default method (changed from an older default or abstract interface method).
8038 DCHECK(translation_it->second.IsTranslation());
8039 translated_method = translation_it->second.GetTranslation();
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008040 auto it = move_table_.find(translated_method);
8041 DCHECK(it != move_table_.end());
8042 translated_method = it->second;
Vladimir Marko921094a2017-01-12 18:37:06 +00008043 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008044 } else {
8045 auto it = move_table_.find(translated_method);
8046 translated_method = (it != move_table_.end()) ? it->second : nullptr;
Vladimir Marko921094a2017-01-12 18:37:06 +00008047 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008048
8049 if (translated_method != nullptr) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008050 // Make sure the new_methods index is set.
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008051 if (translated_method->GetMethodIndexDuringLinking() != i) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008052 if (kIsDebugBuild) {
8053 auto* methods = klass_->GetMethodsPtr();
8054 CHECK_LE(reinterpret_cast<uintptr_t>(&*methods->begin(method_size_, method_alignment_)),
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008055 reinterpret_cast<uintptr_t>(translated_method));
8056 CHECK_LT(reinterpret_cast<uintptr_t>(translated_method),
Vladimir Marko921094a2017-01-12 18:37:06 +00008057 reinterpret_cast<uintptr_t>(&*methods->end(method_size_, method_alignment_)));
8058 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008059 translated_method->SetMethodIndex(0xFFFF & i);
Vladimir Marko921094a2017-01-12 18:37:06 +00008060 }
Nicolas Geoffray0376a5c2017-01-12 15:15:45 +00008061 vtable->SetElementPtrSize(i, translated_method, pointer_size);
Vladimir Marko921094a2017-01-12 18:37:06 +00008062 }
8063 }
Vladimir Markod93e3742018-07-18 10:58:13 +01008064 klass_->SetVTable(vtable);
Vladimir Marko921094a2017-01-12 18:37:06 +00008065 return vtable;
8066}
8067
8068void ClassLinker::LinkInterfaceMethodsHelper::UpdateIfTable(Handle<mirror::IfTable> iftable) {
8069 PointerSize pointer_size = class_linker_->GetImagePointerSize();
8070 const size_t ifcount = klass_->GetIfTableCount();
8071 // Go fix up all the stale iftable pointers.
8072 for (size_t i = 0; i < ifcount; ++i) {
8073 for (size_t j = 0, count = iftable->GetMethodArrayCount(i); j < count; ++j) {
Vladimir Marko557fece2019-03-26 14:29:41 +00008074 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArray(i);
8075 ArtMethod* m = method_array->GetElementPtrSize<ArtMethod*>(j, pointer_size);
Vladimir Marko921094a2017-01-12 18:37:06 +00008076 DCHECK(m != nullptr) << klass_->PrettyClass();
8077 auto it = move_table_.find(m);
8078 if (it != move_table_.end()) {
8079 auto* new_m = it->second;
8080 DCHECK(new_m != nullptr) << klass_->PrettyClass();
8081 method_array->SetElementPtrSize(j, new_m, pointer_size);
8082 }
8083 }
8084 }
8085}
8086
8087void ClassLinker::LinkInterfaceMethodsHelper::UpdateIMT(ArtMethod** out_imt) {
8088 // Fix up IMT next.
8089 for (size_t i = 0; i < ImTable::kSize; ++i) {
8090 auto it = move_table_.find(out_imt[i]);
8091 if (it != move_table_.end()) {
8092 out_imt[i] = it->second;
8093 }
8094 }
8095}
8096
Alex Light705ad492015-09-21 11:36:30 -07008097// TODO This method needs to be split up into several smaller methods.
Alex Lighteb7c1442015-08-31 13:17:42 -07008098bool ClassLinker::LinkInterfaceMethods(
8099 Thread* self,
8100 Handle<mirror::Class> klass,
Vladimir Marko782fb712020-12-23 12:47:31 +00008101 const HashMap<size_t, ClassLinker::MethodTranslation>& default_translations,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008102 bool* out_new_conflict,
Alex Lighteb7c1442015-08-31 13:17:42 -07008103 ArtMethod** out_imt) {
8104 StackHandleScope<3> hs(self);
8105 Runtime* const runtime = Runtime::Current();
Alex Light705ad492015-09-21 11:36:30 -07008106
8107 const bool is_interface = klass->IsInterface();
Alex Lighteb7c1442015-08-31 13:17:42 -07008108 const bool has_superclass = klass->HasSuperClass();
Alex Light705ad492015-09-21 11:36:30 -07008109 const bool fill_tables = !is_interface;
Alex Lighteb7c1442015-08-31 13:17:42 -07008110 const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
Alex Lighteb7c1442015-08-31 13:17:42 -07008111 const size_t ifcount = klass->GetIfTableCount();
8112
Vladimir Marko921094a2017-01-12 18:37:06 +00008113 Handle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));
Mathieu Chartiere401d142015-04-22 13:56:20 -07008114
8115 MutableHandle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
8116 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
Alex Light9139e002015-10-09 15:59:48 -07008117 ArtMethod* const imt_conflict_method = runtime->GetImtConflictMethod();
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07008118 // Copy the IMT from the super class if possible.
Alex Light705ad492015-09-21 11:36:30 -07008119 const bool extend_super_iftable = has_superclass;
8120 if (has_superclass && fill_tables) {
8121 FillImtFromSuperClass(klass,
Alex Light705ad492015-09-21 11:36:30 -07008122 unimplemented_method,
8123 imt_conflict_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008124 out_new_conflict,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07008125 out_imt);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -07008126 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07008127  // Allocate method arrays beforehand since we don't want to miss visiting miranda method roots
   8128  // due to thread suspension.
Alex Light705ad492015-09-21 11:36:30 -07008129 if (fill_tables) {
Vladimir Marko921094a2017-01-12 18:37:06 +00008130 if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
8131 return false;
Mathieu Chartiere401d142015-04-22 13:56:20 -07008132 }
8133 }
8134
Vladimir Marko921094a2017-01-12 18:37:06 +00008135 LinkInterfaceMethodsHelper helper(this, klass, self, runtime);
8136
Igor Murashkinb1d8c312015-08-04 11:18:43 -07008137 auto* old_cause = self->StartAssertNoThreadSuspension(
Mathieu Chartiere401d142015-04-22 13:56:20 -07008138 "Copying ArtMethods for LinkInterfaceMethods");
Alex Light9139e002015-10-09 15:59:48 -07008139 // Going in reverse to ensure that we will hit abstract methods that override defaults before the
8140 // defaults. This means we don't need to do any trickery when creating the Miranda methods, since
8141 // they will already be null. This has the additional benefit that the declarer of a miranda
8142 // method will actually declare an abstract method.
Vladimir Markoba118822017-06-12 15:41:56 +01008143 for (size_t i = ifcount; i != 0u; ) {
Alex Light9139e002015-10-09 15:59:48 -07008144 --i;
Alex Light9139e002015-10-09 15:59:48 -07008145 DCHECK_LT(i, ifcount);
8146
Alex Light705ad492015-09-21 11:36:30 -07008147 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
Mathieu Chartiere401d142015-04-22 13:56:20 -07008148 if (num_methods > 0) {
8149 StackHandleScope<2> hs2(self);
8150 const bool is_super = i < super_ifcount;
8151 const bool super_interface = is_super && extend_super_iftable;
Alex Light705ad492015-09-21 11:36:30 -07008152        // We don't actually create or fill these tables for interfaces; we just copy some methods for
8153 // conflict methods. Just set this as nullptr in those cases.
8154 Handle<mirror::PointerArray> method_array(fill_tables
8155 ? hs2.NewHandle(iftable->GetMethodArray(i))
8156 : hs2.NewHandle<mirror::PointerArray>(nullptr));
Mathieu Chartiere401d142015-04-22 13:56:20 -07008157
Alex Lighte64300b2015-12-15 15:02:47 -08008158 ArraySlice<ArtMethod> input_virtual_methods;
Mathieu Chartier9865bde2015-12-21 09:58:16 -08008159 ScopedNullHandle<mirror::PointerArray> null_handle;
8160 Handle<mirror::PointerArray> input_vtable_array(null_handle);
Mathieu Chartiere401d142015-04-22 13:56:20 -07008161 int32_t input_array_length = 0;
Alex Lighte64300b2015-12-15 15:02:47 -08008162
Alex Light9139e002015-10-09 15:59:48 -07008163 // TODO Cleanup Needed: In the presence of default methods this optimization is rather dirty
8164 // and confusing. Default methods should always look through all the superclasses
8165 // because they are the last choice of an implementation. We get around this by looking
8166 // at the super-classes iftable methods (copied into method_array previously) when we are
8167 // looking for the implementation of a super-interface method but that is rather dirty.
Alex Lighte64300b2015-12-15 15:02:47 -08008168 bool using_virtuals;
Alex Light705ad492015-09-21 11:36:30 -07008169 if (super_interface || is_interface) {
        // If we are overwriting a super class interface, try to search only the virtual methods
        // instead of the whole vtable.
        using_virtuals = true;
        input_virtual_methods = klass->GetDeclaredVirtualMethodsSlice(image_pointer_size_);
        input_array_length = input_virtual_methods.size();
      } else {
        // For a new interface, however, we need the whole vtable in case a new
        // interface method is implemented anywhere in the superclass hierarchy.
        using_virtuals = false;
        DCHECK(vtable != nullptr);
        input_vtable_array = vtable;
        input_array_length = input_vtable_array->GetLength();
      }

      // For each method in interface
      for (size_t j = 0; j < num_methods; ++j) {
        auto* interface_method = iftable->GetInterface(i)->GetVirtualMethod(j, image_pointer_size_);
        MethodNameAndSignatureComparator interface_name_comparator(
            interface_method->GetInterfaceMethodIfProxy(image_pointer_size_));
        uint32_t imt_index = interface_method->GetImtIndex();
        ArtMethod** imt_ptr = &out_imt[imt_index];
        // For each method listed in the interface's method list, find the
        // matching method in our class's method list.  We want to favor the
        // subclass over the superclass, which just requires walking
        // back from the end of the vtable.  (This only matters if the
        // superclass defines a private method and this class redefines
        // it -- otherwise it would use the same vtable slot.  In .dex files
        // those don't end up in the virtual method table, so it shouldn't
        // matter which direction we go.  We walk it backward anyway.)
        //
        // To find defaults we need to do the same but also go over interfaces.
        bool found_impl = false;
        ArtMethod* vtable_impl = nullptr;
        for (int32_t k = input_array_length - 1; k >= 0; --k) {
          ArtMethod* vtable_method = using_virtuals ?
              &input_virtual_methods[k] :
              input_vtable_array->GetElementPtrSize<ArtMethod*>(k, image_pointer_size_);
          ArtMethod* vtable_method_for_name_comparison =
              vtable_method->GetInterfaceMethodIfProxy(image_pointer_size_);
          DCHECK(!vtable_method->IsStatic()) << vtable_method->PrettyMethod();
          if (interface_name_comparator.HasSameNameAndSignature(
              vtable_method_for_name_comparison)) {
            if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
              // Must do EndAssertNoThreadSuspension before throw since the throw can cause
              // allocations.
              self->EndAssertNoThreadSuspension(old_cause);
              ThrowIllegalAccessError(klass.Get(),
                                      "Method '%s' implementing interface method '%s' is not public",
                                      vtable_method->PrettyMethod().c_str(),
                                      interface_method->PrettyMethod().c_str());
              return false;
            } else if (UNLIKELY(vtable_method->IsOverridableByDefaultMethod())) {
              // We might have a newer, better, default method for this, so we just skip it. If we
              // are still using this we will select it again when scanning for default methods. To
              // obviate the need to copy the method again we will make a note that we already found
              // a default here.
              // TODO This should be much cleaner.
              vtable_impl = vtable_method;
              break;
            } else {
              found_impl = true;
              if (LIKELY(fill_tables)) {
                method_array->SetElementPtrSize(j, vtable_method, image_pointer_size_);
                // Place method in imt if entry is empty, place conflict otherwise.
                SetIMTRef(unimplemented_method,
                          imt_conflict_method,
                          vtable_method,
                          /*out*/out_new_conflict,
                          /*out*/imt_ptr);
              }
              break;
            }
          }
        }
        // Continue on to the next method if we are done.
        if (LIKELY(found_impl)) {
          continue;
        } else if (LIKELY(super_interface)) {
          // Don't look for a default implementation when the super-method is implemented directly
          // by the class.
          //
          // See if we can use the superclasses method and skip searching everything else.
          // Note: !found_impl && super_interface
          CHECK(extend_super_iftable);
          // If this is a super_interface method it is possible we shouldn't override it because a
          // superclass could have implemented it directly.  We get the method the superclass used
          // to implement this to know if we can override it with a default method.  Doing this is
          // safe since we know that the super_iftable is filled in so we can simply pull it from
          // there.  We don't bother if this is not a super-classes interface since in that case we
          // have scanned the entire vtable anyway and would have found it.
          // TODO This is rather dirty but it is faster than searching through the entire vtable
          //      every time.
          ArtMethod* supers_method =
              method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
          DCHECK(supers_method != nullptr);
          DCHECK(interface_name_comparator.HasSameNameAndSignature(supers_method));
          if (LIKELY(!supers_method->IsOverridableByDefaultMethod())) {
            // The method is not overridable by a default method (i.e. it is directly implemented
            // in some class).  Therefore move onto the next interface method.
            continue;
          } else {
            // If the super-class's method is override-able by a default method we need to keep
            // track of it because, although it is override-able, it is not guaranteed to be
            // 'overridden'. If it turns out not to be overridden and we did not keep track of it
            // we might add it to the vtable twice, causing corruption (vtable entries having
            // inconsistent and illegal states, incorrect vtable size, and incorrect or
            // inconsistent iftable entries) in this class and any subclasses.
            DCHECK(vtable_impl == nullptr || vtable_impl == supers_method)
                << "vtable_impl was " << ArtMethod::PrettyMethod(vtable_impl)
                << " and not 'nullptr' or "
                << supers_method->PrettyMethod()
                << " as expected. IFTable appears to be corrupt!";
            vtable_impl = supers_method;
          }
        }
        // If we haven't found it yet we should search through the interfaces for default methods.
        ArtMethod* current_method = helper.FindMethod(interface_method,
                                                      interface_name_comparator,
                                                      vtable_impl);
        if (LIKELY(fill_tables)) {
          if (current_method == nullptr && !super_interface) {
            // We could not find an implementation for this method and since it is a brand new
            // interface we searched the entire vtable (and all default methods) for an
            // implementation but couldn't find one. We therefore need to make a miranda method.
            current_method = helper.GetOrCreateMirandaMethod(interface_method,
                                                             interface_name_comparator);
          }

          if (current_method != nullptr) {
            // We found a default method implementation. Record it in the iftable and IMT.
            method_array->SetElementPtrSize(j, current_method, image_pointer_size_);
            SetIMTRef(unimplemented_method,
                      imt_conflict_method,
                      current_method,
                      /*out*/out_new_conflict,
                      /*out*/imt_ptr);
          }
        }
      }  // For each method in interface end.
    }  // if (num_methods > 0)
  }  // For each interface.
  // TODO don't extend virtuals of interface unless necessary (when is it?).
  if (helper.HasNewVirtuals()) {
    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    helper.ReallocMethods();  // No return value to check. Native allocation failure aborts.
    LengthPrefixedArray<ArtMethod>* methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;

    // Done copying methods, they are all roots in the class now, so we can end the no thread
    // suspension assert.
    self->EndAssertNoThreadSuspension(old_cause);

    if (fill_tables) {
      vtable.Assign(helper.UpdateVtable(default_translations, vtable));
      if (UNLIKELY(vtable == nullptr)) {
        // The helper has already called self->AssertPendingOOMException();
        return false;
      }
      helper.UpdateIfTable(iftable);
      helper.UpdateIMT(out_imt);
    }

    helper.CheckNoStaleMethodsInDexCache();
    helper.ClobberOldMethods(old_methods, methods);
  } else {
    self->EndAssertNoThreadSuspension(old_cause);
  }
  if (kIsDebugBuild && !is_interface) {
    CheckVTable(self, klass, image_pointer_size_);
  }
  return true;
}

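// Helper for laying out the instance or static fields of a class; see LinkFields() below.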
class ClassLinker::LinkFieldsHelper {
 public:
  static bool LinkFields(ClassLinker* class_linker,
                         Thread* self,
                         Handle<mirror::Class> klass,
                         bool is_static,
                         size_t* class_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  enum class FieldTypeOrder : uint16_t;
  class FieldGaps;

  struct FieldTypeOrderAndIndex {
    FieldTypeOrder field_type_order;
    uint16_t field_index;
  };

  static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);

  template <size_t kSize>
  static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
};

// We use the following order of field types for assigning offsets.
// Some fields can be shuffled forward to fill gaps, see `ClassLinker::LinkFields()`.
enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
  kReference = 0u,
  kLong,
  kDouble,
  kInt,
  kFloat,
  kChar,
  kShort,
  kBoolean,
  kByte,

  kLast64BitType = kDouble,
  kLast32BitType = kFloat,
  kLast16BitType = kShort,
};

ALWAYS_INLINE
ClassLinker::LinkFieldsHelper::FieldTypeOrder
ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
  switch (first_char) {
    case 'J':
      return FieldTypeOrder::kLong;
    case 'D':
      return FieldTypeOrder::kDouble;
    case 'I':
      return FieldTypeOrder::kInt;
    case 'F':
      return FieldTypeOrder::kFloat;
    case 'C':
      return FieldTypeOrder::kChar;
    case 'S':
      return FieldTypeOrder::kShort;
    case 'Z':
      return FieldTypeOrder::kBoolean;
    case 'B':
      return FieldTypeOrder::kByte;
    default:
      DCHECK(first_char == 'L' || first_char == '[') << first_char;
      return FieldTypeOrder::kReference;
  }
}

// Gaps where we can insert fields in object layout.
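// At most one gap of each size (1, 2 and 4 bytes) is tracked at any time: gaps are created
// only when aligning the offset up for a larger field and are consumed in decreasing
// field-size order, which the DCHECKs in AddGaps() and ReleaseGap() below rely on.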
class ClassLinker::LinkFieldsHelper::FieldGaps {
 public:
  template <uint32_t kSize>
  ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
    static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
    if (!IsAligned<kSize>(field_offset.Uint32Value())) {
      uint32_t gap_start = field_offset.Uint32Value();
      field_offset = MemberOffset(RoundUp(gap_start, kSize));
      AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
    }
    return field_offset;
  }

  template <uint32_t kSize>
  bool HasGap() const {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    return (kSize == 1u && gap1_offset_ != kNoOffset) ||
           (kSize <= 2u && gap2_offset_ != kNoOffset) ||
           gap4_offset_ != kNoOffset;
  }

  template <uint32_t kSize>
  MemberOffset ReleaseGap() {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    uint32_t result;
    if (kSize == 1u && gap1_offset_ != kNoOffset) {
      DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
      result = gap1_offset_;
      gap1_offset_ = kNoOffset;
    } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
      result = gap2_offset_;
      gap2_offset_ = kNoOffset;
      if (kSize < 2u) {
        AddGaps<1u>(result + kSize, result + 2u);
      }
    } else {
      DCHECK_NE(gap4_offset_, kNoOffset);
      result = gap4_offset_;
      gap4_offset_ = kNoOffset;
      if (kSize < 4u) {
        AddGaps<kSize | 2u>(result + kSize, result + 4u);
      }
    }
    return MemberOffset(result);
  }

 private:
  template <uint32_t kGapsToCheck>
  void AddGaps(uint32_t gap_start, uint32_t gap_end) {
    if ((kGapsToCheck & 1u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_end, 2u);
      if ((gap_start & 1u) != 0u) {
        DCHECK_EQ(gap1_offset_, kNoOffset);
        gap1_offset_ = gap_start;
        gap_start += 1u;
        if (kGapsToCheck == 1u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 2u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 2u);
      DCHECK_ALIGNED(gap_end, 4u);
      if ((gap_start & 2u) != 0u) {
        DCHECK_EQ(gap2_offset_, kNoOffset);
        gap2_offset_ = gap_start;
        gap_start += 2u;
        if (kGapsToCheck <= 3u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 4u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 4u);
      DCHECK_ALIGNED(gap_end, 8u);
      DCHECK_EQ(gap_start + 4u, gap_end);
      DCHECK_EQ(gap4_offset_, kNoOffset);
      gap4_offset_ = gap_start;
      return;
    }

    DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
                  << " after checking " << kGapsToCheck;
  }

  static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);

  uint32_t gap4_offset_ = kNoOffset;
  uint32_t gap2_offset_ = kNoOffset;
  uint32_t gap1_offset_ = kNoOffset;
};

template <size_t kSize>
ALWAYS_INLINE
MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
                                                              MemberOffset field_offset) {
  DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
  DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
  field->SetOffset(field_offset);
  return MemberOffset(field_offset.Uint32Value() + kSize);
}

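// Lays out the instance or static fields of `klass` and assigns their offsets, grouping
// the fields by type as described in the comment below. Updates the class with the number
// of reference fields and, for non-variable-size classes, the object size; for static
// fields the total size is returned through `class_size`.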
bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
                                               Thread* self,
                                               Handle<mirror::Class> klass,
                                               bool is_static,
                                               size_t* class_size) {
  self->AllowThreadSuspension();
  const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
  LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
                                                            klass->GetIFieldsPtr();

  // Initialize field_offset
  MemberOffset field_offset(0);
  if (is_static) {
    field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
        class_linker->GetImagePointerSize());
  } else {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (super_class != nullptr) {
      CHECK(super_class->IsResolved())
          << klass->PrettyClass() << " " << super_class->PrettyClass();
      field_offset = MemberOffset(super_class->GetObjectSize());
    }
  }

  CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();

  // We want a relatively stable order so that adding new fields
  // minimizes disruption of the C++ versions such as Class and Method.
  //
  // The overall sort order is:
  // 1) All object reference fields, sorted alphabetically.
  // 2) All java long (64-bit) integer fields, sorted alphabetically.
  // 3) All java double (64-bit) floating point fields, sorted alphabetically.
  // 4) All java int (32-bit) integer fields, sorted alphabetically.
  // 5) All java float (32-bit) floating point fields, sorted alphabetically.
  // 6) All java char (16-bit) integer fields, sorted alphabetically.
  // 7) All java short (16-bit) integer fields, sorted alphabetically.
  // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
  // 9) All java byte (8-bit) integer fields, sorted alphabetically.
  //
  // (References are first to increase the chance of reference visiting
  // being able to take a fast path using a bitmap of references at the
  // start of the object, see `Class::reference_instance_offsets_`.)
  //
  // Once the fields are sorted in this order we will attempt to fill any gaps
  // that might be present in the memory layout of the structure.
  // Note that we shall not fill gaps between the superclass fields.

  // Collect fields and their "type order index" (see numbered points above).
  const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
      "Using plain ArtField references");
  constexpr size_t kStackBufferEntries = 64;  // Avoid allocations for small number of fields.
  FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
  std::vector<FieldTypeOrderAndIndex> heap_buffer;
  ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
  if (num_fields <= kStackBufferEntries) {
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
  } else {
    heap_buffer.resize(num_fields);
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
  }
  size_t num_reference_fields = 0;
  size_t primitive_fields_start = num_fields;
  DCHECK_LE(num_fields, 1u << 16);
  for (size_t i = 0; i != num_fields; ++i) {
    ArtField* field = &fields->At(i);
    const char* descriptor = field->GetTypeDescriptor();
    FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
    uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
    // Insert references to the start, other fields to the end.
    DCHECK_LT(num_reference_fields, primitive_fields_start);
    if (field_type_order == FieldTypeOrder::kReference) {
      sorted_fields[num_reference_fields] = { field_type_order, field_index };
      ++num_reference_fields;
    } else {
      --primitive_fields_start;
      sorted_fields[primitive_fields_start] = { field_type_order, field_index };
    }
  }
  DCHECK_EQ(num_reference_fields, primitive_fields_start);

  // Reference fields are already sorted by field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin(),
      sorted_fields.begin() + num_reference_fields,
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
                 lhs.field_index < rhs.field_index);
        return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
      }));
  // Primitive fields were stored in reverse order of their field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
                 lhs.field_index > rhs.field_index);
        return lhs.field_index > rhs.field_index;
      }));
  // Sort the primitive fields by the field type order, then field index.
  std::sort(sorted_fields.begin() + primitive_fields_start,
            sorted_fields.end(),
            [](const auto& lhs, const auto& rhs) {
              if (lhs.field_type_order != rhs.field_type_order) {
                return lhs.field_type_order < rhs.field_type_order;
              } else {
                return lhs.field_index < rhs.field_index;
              }
            });
  // Primitive fields are now sorted by field size (descending), then type, then field index.
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(lhs_type, Primitive::kPrimNot);
        Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(rhs_type, Primitive::kPrimNot);
        if (lhs_type != rhs_type) {
          size_t lhs_size = Primitive::ComponentSize(lhs_type);
          size_t rhs_size = Primitive::ComponentSize(rhs_type);
          return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
        } else {
          return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
        }
      }));

  // Process reference fields.
  FieldGaps field_gaps;
  size_t index = 0u;
  if (num_reference_fields != 0u) {
    constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
    field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
    for (; index != num_reference_fields; ++index) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
    }
  }
  // Process 64-bit fields.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
    field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<8u>(field, field_offset);
      ++index;
    }
  }
  // Process 32-bit fields.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
    field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
    if (field_gaps.HasGap<4u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>());  // Ignore return value.
      ++index;
      DCHECK(!field_gaps.HasGap<4u>());  // There can be only one gap for a 32-bit field.
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<4u>(field, field_offset);
      ++index;
    }
  }
  // Process 16-bit fields.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
    field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
           field_gaps.HasGap<2u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>());  // Ignore return value.
      ++index;
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<2u>(field, field_offset);
      ++index;
    }
  }
  // Process 8-bit fields.
  for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>());  // Ignore return value.
  }
  for (; index != num_fields; ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    field_offset = AssignFieldOffset<1u>(field, field_offset);
  }

  self->EndAssertNoThreadSuspension(old_no_suspend_cause);

  // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
  DCHECK(!class_linker->init_done_ || !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
  if (!is_static &&
      UNLIKELY(!class_linker->init_done_) &&
      klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
    // We know there are no non-reference fields in the Reference classes, and we know
    // that 'referent' is alphabetically last, so this is easy...
    CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
    CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
        << klass->PrettyClass();
    --num_reference_fields;
  }

  size_t size = field_offset.Uint32Value();
  // Update klass
  if (is_static) {
    klass->SetNumReferenceStaticFields(num_reference_fields);
    *class_size = size;
  } else {
    klass->SetNumReferenceInstanceFields(num_reference_fields);
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (num_reference_fields == 0 || super_class == nullptr) {
      // object has one reference field, klass, but we ignore it since we always visit the class.
      // super_class is null iff the class is java.lang.Object.
      if (super_class == nullptr ||
          (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
        klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
      }
    }
    if (kIsDebugBuild) {
      DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
      size_t total_reference_instance_fields = 0;
      ObjPtr<mirror::Class> cur_super = klass.Get();
      while (cur_super != nullptr) {
        total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
        cur_super = cur_super->GetSuperClass();
      }
      if (super_class == nullptr) {
        CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
      } else {
        // Check that there is at least num_reference_fields other than Object.class.
        CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
            << klass->PrettyClass();
      }
    }
    if (!klass->IsVariableSize()) {
      std::string temp;
      DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
      size_t previous_size = klass->GetObjectSize();
      if (previous_size != 0) {
        // Make sure that we didn't originally have an incorrect size.
        CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
      }
      klass->SetObjectSize(size);
    }
  }

  if (kIsDebugBuild) {
    // Make sure that the fields array is ordered by name but all reference
    // offsets are at the beginning as far as alignment allows.
    MemberOffset start_ref_offset = is_static
        ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
        : klass->GetFirstReferenceInstanceFieldOffset();
    MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
                                num_reference_fields *
                                    sizeof(mirror::HeapReference<mirror::Object>));
    MemberOffset current_ref_offset = start_ref_offset;
    for (size_t i = 0; i < num_fields; i++) {
      ArtField* field = &fields->At(i);
      VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
                         << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
                         << " offset=" << field->GetOffsetDuringLinking();
      if (i != 0) {
        ArtField* const prev_field = &fields->At(i - 1);
        // NOTE: The field names can be the same. This is not possible in the Java language
        // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
        DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
      }
      Primitive::Type type = field->GetTypeAsPrimitiveType();
      bool is_primitive = type != Primitive::kPrimNot;
      if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
          strcmp("referent", field->GetName()) == 0) {
        is_primitive = true;  // We lied above, so we have to expect a lie here.
      }
      MemberOffset offset = field->GetOffsetDuringLinking();
      if (is_primitive) {
        if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
          // Shuffled before references.
          size_t type_size = Primitive::ComponentSize(type);
          CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
          CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
          CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
          CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
        }
      } else {
        CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
        current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
                                          sizeof(mirror::HeapReference<mirror::Object>));
      }
    }
    CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
  }
  return true;
}

bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
  CHECK(klass != nullptr);
  return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
}

bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
  CHECK(klass != nullptr);
  return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
}

// Set the bitmap of reference instance field offsets.
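// Each bit in the bitmap corresponds to one HeapReference-sized slot after the object
// header; if the reference fields do not all fit within the 32-bit bitmap, the sentinel
// value kClassWalkSuper is stored instead and users of the bitmap fall back to walking
// the fields through the class hierarchy.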
void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
  uint32_t reference_offsets = 0;
  ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
  // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
  if (super_class != nullptr) {
    reference_offsets = super_class->GetReferenceInstanceOffsets();
    // Compute reference offsets unless our superclass overflowed.
    if (reference_offsets != mirror::Class::kClassWalkSuper) {
      size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
      if (num_reference_fields != 0u) {
        // All of the fields that contain object references are guaranteed to be grouped in memory
        // starting at an appropriately aligned address after super class object data.
        uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
                                        sizeof(mirror::HeapReference<mirror::Object>));
        uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
            sizeof(mirror::HeapReference<mirror::Object>);
        if (start_bit + num_reference_fields > 32) {
          reference_offsets = mirror::Class::kClassWalkSuper;
        } else {
          reference_offsets |= (0xffffffffu << start_bit) &
                               (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
        }
      }
    }
  }
  klass->SetReferenceInstanceOffsets(reference_offsets);
}

ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
                                                    ObjPtr<mirror::DexCache> dex_cache) {
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
  return DoResolveString(string_idx, h_dex_cache);
}

ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
                                                    Handle<mirror::DexCache> dex_cache) {
  const DexFile& dex_file = *dex_cache->GetDexFile();
  uint32_t utf16_length;
  const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
  ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
  if (string != nullptr) {
    dex_cache->SetResolvedString(string_idx, string);
  }
  return string;
}

ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
                                                   ObjPtr<mirror::DexCache> dex_cache) {
  DCHECK(dex_cache != nullptr);
  const DexFile& dex_file = *dex_cache->GetDexFile();
  uint32_t utf16_length;
  const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
  ObjPtr<mirror::String> string =
      intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
  if (string != nullptr) {
    dex_cache->SetResolvedString(string_idx, string);
  }
  return string;
}

ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
                                                        ObjPtr<mirror::Class> referrer) {
  return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
}

ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
                                                        ObjPtr<mirror::DexCache> dex_cache,
                                                        ObjPtr<mirror::ClassLoader> class_loader) {
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const char* descriptor = dex_file.StringByTypeIdx(type_idx);
  ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
  if (type != nullptr) {
    DCHECK(type->IsResolved());
    dex_cache->SetResolvedType(type_idx, type);
  }
  return type;
}

ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(const char* descriptor,
                                                      ObjPtr<mirror::ClassLoader> class_loader) {
  DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
  ObjPtr<mirror::Class> type = nullptr;
  if (descriptor[1] == '\0') {
    // Only the descriptors of primitive types should be 1 character long; also avoid class lookup
    // for primitive classes that aren't backed by dex files.
    type = LookupPrimitiveClass(descriptor[0]);
  } else {
    Thread* const self = Thread::Current();
    DCHECK(self != nullptr);
    const size_t hash = ComputeModifiedUtf8Hash(descriptor);
    // Find the class in the loaded classes table.
    type = LookupClass(self, descriptor, hash, class_loader);
  }
  return (type != nullptr && type->IsResolved()) ? type : nullptr;
}

template <typename RefType>
ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
  StackHandleScope<2> hs(Thread::Current());
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
  return DoResolveType(type_idx, dex_cache, class_loader);
}

// Instantiate the above.
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtField* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtMethod* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ObjPtr<mirror::Class> referrer);

ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                 Handle<mirror::DexCache> dex_cache,
                                                 Handle<mirror::ClassLoader> class_loader) {
  Thread* self = Thread::Current();
  const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
  ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
  if (resolved != nullptr) {
    // TODO: we used to throw here if resolved's class loader was not the
    //       boot class loader. This was to permit different classes with the
    //       same name to be loaded simultaneously by different loaders.
    dex_cache->SetResolvedType(type_idx, resolved);
  } else {
    CHECK(self->IsExceptionPending())
        << "Expected pending exception for failed resolution of: " << descriptor;
    // Convert a ClassNotFoundException to a NoClassDefFoundError.
    StackHandleScope<1> hs(self);
    Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
    if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
      DCHECK(resolved == nullptr);  // No Handle needed to preserve resolved.
      self->ClearException();
      ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
      self->GetException()->SetCause(cause.Get());
    }
  }
  DCHECK((resolved == nullptr) || resolved->IsResolved())
      << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
  return resolved;
}

ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
                                           ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader,
                                           uint32_t method_idx) {
  // Search for the method using dex_cache and method_idx. The Class::Find*Method()
  // functions can optimize the search if the dex_cache is the same as the DexCache
  // of the class, with fall-back to name and signature search otherwise.
  ArtMethod* resolved = nullptr;
  if (klass->IsInterface()) {
    resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
  } else {
    resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
  }
  DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
  if (resolved != nullptr &&
      // We pass AccessMethod::kNone instead of kLinking so that we do not warn on this
      // access yet; we still need to check whether the method can be accessed through an
      // interface.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kNone)) {
    // The resolved method that we have found cannot be accessed due to
    // hiddenapi (typically it is declared up the hierarchy and is not an SDK
    // method). Try to find an interface method from the implemented interfaces which is
    // part of the SDK.
    ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
    if (itf_method == nullptr) {
      // No interface method. Call ShouldDenyAccessToMember again but this time
      // with AccessMethod::kLinking to ensure that an appropriate warning is
      // logged.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kLinking);
      resolved = nullptr;
    } else {
      // We found an interface method that is accessible; continue with the resolved method.
    }
  }
  if (resolved != nullptr) {
    // In case of jvmti, the dex file gets verified before being registered, so first
    // check if it's registered before checking class tables.
    const DexFile& dex_file = *dex_cache->GetDexFile();
    DCHECK(!IsDexFileRegistered(Thread::Current(), dex_file) ||
           FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
        << "DexFile referrer: " << dex_file.GetLocation()
        << " ClassLoader: " << DescribeLoaders(class_loader, "");
    // Be a good citizen and update the dex cache to speed subsequent calls.
    dex_cache->SetResolvedMethod(method_idx, resolved);
    // Disable the following invariant check as the verifier breaks it. b/73760543
    // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
    // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
    //     << "Method: " << resolved->PrettyMethod() << ", "
    //     << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
    //     << "DexFile referrer: " << dex_file.GetLocation();
  }
  return resolved;
}

// Returns true if `method` is either null or hidden.
// Does not print any warnings if it is hidden.
static bool CheckNoSuchMethod(ArtMethod* method,
                              ObjPtr<mirror::DexCache> dex_cache,
                              ObjPtr<mirror::ClassLoader> class_loader)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return method == nullptr ||
         hiddenapi::ShouldDenyAccessToMember(method,
                                             hiddenapi::AccessContext(class_loader, dex_cache),
                                             hiddenapi::AccessMethod::kNone);  // no warnings
}

ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
                                               ObjPtr<mirror::DexCache> dex_cache,
                                               ObjPtr<mirror::ClassLoader> class_loader,
                                               uint32_t method_idx) {
  if (klass->IsInterface()) {
    ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
    return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
  } else {
    // If there was an interface method with the same signature, we would have
    // found it in the "copied" methods. Only DCHECK that the interface method
    // really does not exist.
    if (kIsDebugBuild) {
      ArtMethod* method =
          klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
      DCHECK(CheckNoSuchMethod(method, dex_cache, class_loader));
    }
    return nullptr;
  }
}

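// Resolves the method with the given index, resolving its declaring class first if needed.
// With ResolveMode::kCheckICCEAndIAE this also checks that the invoke type matches the
// class type and, when a referrer is available, that the referrer may access the method.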
template <ClassLinker::ResolveMode kResolveMode>
ArtMethod* ClassLinker::ResolveMethod(uint32_t method_idx,
                                      Handle<mirror::DexCache> dex_cache,
                                      Handle<mirror::ClassLoader> class_loader,
                                      ArtMethod* referrer,
                                      InvokeType type) {
  DCHECK(!Thread::Current()->IsExceptionPending()) << Thread::Current()->GetException()->Dump();
  DCHECK(dex_cache != nullptr);
  DCHECK(referrer == nullptr || !referrer->IsProxyMethod());
  // Check for hit in the dex cache.
  ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
  Thread::PoisonObjectPointersIfDebug();
  DCHECK(resolved == nullptr || !resolved->IsRuntimeMethod());
  bool valid_dex_cache_method = resolved != nullptr;
  if (kResolveMode == ResolveMode::kNoChecks && valid_dex_cache_method) {
    // We have a valid method from the DexCache and no checks to perform.
    DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
    return resolved;
  }
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const dex::MethodId& method_id = dex_file.GetMethodId(method_idx);
  ObjPtr<mirror::Class> klass = nullptr;
  if (valid_dex_cache_method) {
    // We have a valid method from the DexCache but we need to perform ICCE and IAE checks.
    DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
    klass = LookupResolvedType(method_id.class_idx_, dex_cache.Get(), class_loader.Get());
Vladimir Marko6f1bd462017-12-06 17:45:03 +00009103 if (UNLIKELY(klass == nullptr)) {
Nicolas Geoffraybefa3092018-02-22 14:50:01 +00009104 // We normally should not end up here. However, the verifier currently doesn't guarantee
9105 // the invariant of having the klass in the class table. b/73760543
9106 klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Nicolas Geoffray07614192020-04-28 17:31:31 +01009107 if (klass == nullptr) {
9108 // This can only happen if the current thread is not allowed to load
9109 // classes.
9110 DCHECK(!Thread::Current()->CanLoadClasses());
9111 DCHECK(Thread::Current()->IsExceptionPending());
9112 return nullptr;
9113 }
Vladimir Marko6f1bd462017-12-06 17:45:03 +00009114 }
Vladimir Markoba118822017-06-12 15:41:56 +01009115 } else {
9116 // The method was not in the DexCache, resolve the declaring class.
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009117 klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Vladimir Markoba118822017-06-12 15:41:56 +01009118 if (klass == nullptr) {
9119 DCHECK(Thread::Current()->IsExceptionPending());
9120 return nullptr;
9121 }
9122 }
9123
9124 // Check if the invoke type matches the class type.
9125 if (kResolveMode == ResolveMode::kCheckICCEAndIAE &&
Andreas Gampe98ea9d92018-10-19 14:06:15 -07009126 CheckInvokeClassMismatch</* kThrow= */ true>(
Vladimir Markoba118822017-06-12 15:41:56 +01009127 dex_cache.Get(), type, [klass]() { return klass; })) {
Elliott Hughescc5f9a92011-09-28 19:17:29 -07009128 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07009129 return nullptr;
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009130 }
Vladimir Markoba118822017-06-12 15:41:56 +01009131
9132 if (!valid_dex_cache_method) {
Nicolas Geoffrayea179f42018-02-08 22:30:18 +00009133 resolved = FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009134 }
Vladimir Markoba118822017-06-12 15:41:56 +01009135
9136 // Note: We can check for IllegalAccessError only if we have a referrer.
9137 if (kResolveMode == ResolveMode::kCheckICCEAndIAE && resolved != nullptr && referrer != nullptr) {
9138 ObjPtr<mirror::Class> methods_class = resolved->GetDeclaringClass();
9139 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9140 if (!referring_class->CheckResolvedMethodAccess(methods_class,
9141 resolved,
9142 dex_cache.Get(),
9143 method_idx,
9144 type)) {
9145 DCHECK(Thread::Current()->IsExceptionPending());
9146 return nullptr;
9147 }
9148 }
9149
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07009150 // If we found a method, check for incompatible class changes.
Vladimir Markoba118822017-06-12 15:41:56 +01009151 if (LIKELY(resolved != nullptr) &&
9152 LIKELY(kResolveMode == ResolveMode::kNoChecks ||
9153 !resolved->CheckIncompatibleClassChange(type))) {
Ian Rogers08f753d2012-08-24 14:35:25 -07009154 return resolved;
9155 } else {
Vladimir Markoba118822017-06-12 15:41:56 +01009156 // If we had a method, or if we can find one with another lookup type,
9157 // it's an incompatible-class-change error.
9158 if (resolved == nullptr) {
David Brazdil4525e0b2018-04-05 16:57:32 +01009159 resolved = FindIncompatibleMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
Vladimir Markoba118822017-06-12 15:41:56 +01009160 }
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07009161 if (resolved != nullptr) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07009162 ThrowIncompatibleClassChangeError(type, resolved->GetInvokeType(), resolved, referrer);
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07009163 } else {
Vladimir Markoba118822017-06-12 15:41:56 +01009164 // We failed to find the method (using all lookup types), so throw a NoSuchMethodError.
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07009165 const char* name = dex_file.StringDataByIdx(method_id.name_idx_);
9166 const Signature signature = dex_file.GetMethodSignature(method_id);
Vladimir Markoba118822017-06-12 15:41:56 +01009167 ThrowNoSuchMethodError(type, klass, name, signature);
Ian Rogers08f753d2012-08-24 14:35:25 -07009168 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08009169 Thread::Current()->AssertPendingException();
Andreas Gampeeff0f5d2014-08-13 21:49:37 -07009170 return nullptr;
Ian Rogers08f753d2012-08-24 14:35:25 -07009171 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07009172}
9173
Vladimir Marko89011192017-12-11 13:45:05 +00009174ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
Jeff Hao13e748b2015-08-25 20:44:19 +00009175 Handle<mirror::DexCache> dex_cache,
9176 Handle<mirror::ClassLoader> class_loader) {
David Srbecky5de5efe2021-02-15 21:23:00 +00009177 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009178 Thread::PoisonObjectPointersIfDebug();
Vladimir Marko07bfbac2017-07-06 14:55:02 +01009179 if (resolved != nullptr) {
9180 DCHECK(!resolved->IsRuntimeMethod());
Jeff Hao13e748b2015-08-25 20:44:19 +00009181 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
9182 return resolved;
9183 }
9184 // Not in the dex cache; resolve the declaring class.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009185 const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009186 ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Jeff Hao13e748b2015-08-25 20:44:19 +00009187 if (klass == nullptr) {
9188 Thread::Current()->AssertPendingException();
9189 return nullptr;
9190 }
9191 if (klass->IsInterface()) {
Vladimir Markoba118822017-06-12 15:41:56 +01009192 resolved = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, image_pointer_size_);
9193 } else {
9194 resolved = klass->FindClassMethod(dex_cache.Get(), method_idx, image_pointer_size_);
Jeff Hao13e748b2015-08-25 20:44:19 +00009195 }
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009196 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009197 hiddenapi::ShouldDenyAccessToMember(
9198 resolved,
9199 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
9200 hiddenapi::AccessMethod::kLinking)) {
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009201 resolved = nullptr;
9202 }
Jeff Hao13e748b2015-08-25 20:44:19 +00009203 return resolved;
9204}
9205
Vladimir Markof44d36c2017-03-14 14:18:46 +00009206ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
9207 ObjPtr<mirror::DexCache> dex_cache,
9208 ObjPtr<mirror::ClassLoader> class_loader,
9209 bool is_static) {
9210 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009211 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009212 ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
9213 if (klass == nullptr) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009214 klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009215 }
9216 if (klass == nullptr) {
9217 // The class has not been resolved yet, so the field is also unresolved.
9218 return nullptr;
9219 }
9220 DCHECK(klass->IsResolved());
Vladimir Markof44d36c2017-03-14 14:18:46 +00009221
David Brazdil1ab0fa82018-05-04 11:28:03 +01009222 return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009223}
9224
Vladimir Markoe11dd502017-12-08 14:09:45 +00009225ArtField* ClassLinker::ResolveField(uint32_t field_idx,
Mathieu Chartierc7853442015-03-27 14:35:38 -07009226 Handle<mirror::DexCache> dex_cache,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07009227 Handle<mirror::ClassLoader> class_loader,
9228 bool is_static) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009229 DCHECK(dex_cache != nullptr);
Nicolas Geoffrayf3688822020-03-25 15:04:03 +00009230 DCHECK(!Thread::Current()->IsExceptionPending()) << Thread::Current()->GetException()->Dump();
David Srbecky5de5efe2021-02-15 21:23:00 +00009231 ArtField* resolved = dex_cache->GetResolvedField(field_idx);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009232 Thread::PoisonObjectPointersIfDebug();
Andreas Gampe58a5af82014-07-31 16:23:49 -07009233 if (resolved != nullptr) {
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009234 return resolved;
9235 }
Vladimir Markoe11dd502017-12-08 14:09:45 +00009236 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009237 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009238 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009239 if (klass == nullptr) {
Ian Rogers9f1ab122011-12-12 08:52:43 -08009240 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampe58a5af82014-07-31 16:23:49 -07009241 return nullptr;
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009242 }
9243
David Brazdil1ab0fa82018-05-04 11:28:03 +01009244 resolved = FindResolvedField(klass, dex_cache.Get(), class_loader.Get(), field_idx, is_static);
Andreas Gampe58a5af82014-07-31 16:23:49 -07009245 if (resolved == nullptr) {
Ian Rogers7b0c5b42012-02-16 15:29:07 -08009246 const char* name = dex_file.GetFieldName(field_id);
9247 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009248 ThrowNoSuchFieldError(is_static ? "static " : "instance ", klass, type, name);
David Brazdil8ce3bfa2018-03-12 18:01:18 +00009249 }
Ian Rogersb067ac22011-12-13 18:05:09 -08009250 return resolved;
9251}
9252
Vladimir Markoe11dd502017-12-08 14:09:45 +00009253ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
Mathieu Chartierc7853442015-03-27 14:35:38 -07009254 Handle<mirror::DexCache> dex_cache,
9255 Handle<mirror::ClassLoader> class_loader) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009256 DCHECK(dex_cache != nullptr);
David Srbecky5de5efe2021-02-15 21:23:00 +00009257 ArtField* resolved = dex_cache->GetResolvedField(field_idx);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009258 Thread::PoisonObjectPointersIfDebug();
Andreas Gampe58a5af82014-07-31 16:23:49 -07009259 if (resolved != nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009260 return resolved;
9261 }
Vladimir Markoe11dd502017-12-08 14:09:45 +00009262 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009263 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009264 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009265 if (klass == nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009266 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009267 return nullptr;
Ian Rogersb067ac22011-12-13 18:05:09 -08009268 }
9269
David Brazdil1ab0fa82018-05-04 11:28:03 +01009270 resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
9271 if (resolved == nullptr) {
9272 const char* name = dex_file.GetFieldName(field_id);
9273 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009274 ThrowNoSuchFieldError("", klass, type, name);
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009275 }
9276 return resolved;
Carl Shapiro5fafe2b2011-07-09 15:34:41 -07009277}
9278
David Brazdil1ab0fa82018-05-04 11:28:03 +01009279ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
9280 ObjPtr<mirror::DexCache> dex_cache,
9281 ObjPtr<mirror::ClassLoader> class_loader,
9282 uint32_t field_idx,
9283 bool is_static) {
Vladimir Markob10668c2021-06-10 09:52:53 +01009284 ArtField* resolved = is_static ? klass->FindStaticField(dex_cache, field_idx)
9285 : klass->FindInstanceField(dex_cache, field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009286 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009287 hiddenapi::ShouldDenyAccessToMember(resolved,
9288 hiddenapi::AccessContext(class_loader, dex_cache),
9289 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009290 resolved = nullptr;
9291 }
9292
9293 if (resolved != nullptr) {
David Srbecky5de5efe2021-02-15 21:23:00 +00009294 dex_cache->SetResolvedField(field_idx, resolved);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009295 }
9296
9297 return resolved;
9298}
9299
9300ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
9301 ObjPtr<mirror::DexCache> dex_cache,
9302 ObjPtr<mirror::ClassLoader> class_loader,
9303 uint32_t field_idx) {
Vladimir Markob10668c2021-06-10 09:52:53 +01009304 ArtField* resolved = klass->FindField(dex_cache, field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009305
9306 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009307 hiddenapi::ShouldDenyAccessToMember(resolved,
9308 hiddenapi::AccessContext(class_loader, dex_cache),
9309 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009310 resolved = nullptr;
9311 }
9312
9313 if (resolved != nullptr) {
David Srbecky5de5efe2021-02-15 21:23:00 +00009314 dex_cache->SetResolvedField(field_idx, resolved);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009315 }
9316
9317 return resolved;
9318}
9319
Vladimir Markoaf940202017-12-08 15:01:18 +00009320ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
9321 Thread* self,
Orion Hodson06d10a72018-05-14 08:53:38 +01009322 dex::ProtoIndex proto_idx,
Vladimir Markoaf940202017-12-08 15:01:18 +00009323 Handle<mirror::DexCache> dex_cache,
9324 Handle<mirror::ClassLoader> class_loader) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009325 DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
Andreas Gampefa4333d2017-02-14 11:10:34 -08009326 DCHECK(dex_cache != nullptr);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009327
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009328 ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009329 if (resolved != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01009330 return resolved;
Narayan Kamath25352fc2016-08-03 12:46:58 +01009331 }
9332
Narayan Kamath25352fc2016-08-03 12:46:58 +01009333 StackHandleScope<4> hs(self);
9334
9335 // First resolve the return type.
Vladimir Markoaf940202017-12-08 15:01:18 +00009336 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009337 const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009338 Handle<mirror::Class> return_type(hs.NewHandle(
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009339 ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009340 if (return_type == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009341 DCHECK(self->IsExceptionPending());
9342 return nullptr;
9343 }
9344
9345 // Then resolve the argument types.
9346 //
9347 // TODO: Is there a better way to figure out the number of method arguments
9348 // other than by looking at the shorty?
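// Illustrative note: in a dex shorty the first character encodes the return type and each
// following character encodes one argument (e.g. "LIS" describes an object-returning method
// taking an int and a short), so strlen(shorty) - 1 is the argument count.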
9349 const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;
9350
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009351 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Narayan Kamath25352fc2016-08-03 12:46:58 +01009352 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9353 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009354 if (method_params == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009355 DCHECK(self->IsExceptionPending());
9356 return nullptr;
9357 }
9358
9359 DexFileParameterIterator it(dex_file, proto_id);
9360 int32_t i = 0;
9361 MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
9362 for (; it.HasNext(); it.Next()) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08009363 const dex::TypeIndex type_idx = it.GetTypeIdx();
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009364 param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009365 if (param_class == nullptr) {
Narayan Kamath25352fc2016-08-03 12:46:58 +01009366 DCHECK(self->IsExceptionPending());
9367 return nullptr;
9368 }
9369
9370 method_params->Set(i++, param_class.Get());
9371 }
9372
9373 DCHECK(!it.HasNext());
9374
9375 Handle<mirror::MethodType> type = hs.NewHandle(
9376 mirror::MethodType::Create(self, return_type, method_params));
9377 dex_cache->SetResolvedMethodType(proto_idx, type.Get());
9378
9379 return type.Get();
9380}
9381
Vladimir Markoaf940202017-12-08 15:01:18 +00009382ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
Orion Hodson06d10a72018-05-14 08:53:38 +01009383 dex::ProtoIndex proto_idx,
Vladimir Markoaf940202017-12-08 15:01:18 +00009384 ArtMethod* referrer) {
Orion Hodson2e599942017-09-22 16:17:41 +01009385 StackHandleScope<2> hs(self);
Orion Hodson2e599942017-09-22 16:17:41 +01009386 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9387 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
Vladimir Markoaf940202017-12-08 15:01:18 +00009388 return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
Orion Hodson2e599942017-09-22 16:17:41 +01009389}
9390
Vladimir Marko5aead702019-03-27 11:00:36 +00009391ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009392 Thread* self,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009393 const dex::MethodHandleItem& method_handle,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009394 ArtMethod* referrer) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009395 DexFile::MethodHandleType handle_type =
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009396 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9397 mirror::MethodHandle::Kind kind;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009398 bool is_put;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009399 bool is_static;
9400 int32_t num_params;
Orion Hodsonc069a302017-01-18 09:23:12 +00009401 switch (handle_type) {
9402 case DexFile::MethodHandleType::kStaticPut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009403 kind = mirror::MethodHandle::Kind::kStaticPut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009404 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009405 is_static = true;
9406 num_params = 1;
Orion Hodson631827d2017-04-10 14:53:47 +01009407 break;
9408 }
9409 case DexFile::MethodHandleType::kStaticGet: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009410 kind = mirror::MethodHandle::Kind::kStaticGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009411 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009412 is_static = true;
9413 num_params = 0;
Orion Hodson631827d2017-04-10 14:53:47 +01009414 break;
9415 }
9416 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009417 kind = mirror::MethodHandle::Kind::kInstancePut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009418 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009419 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009420 num_params = 2;
9421 break;
9422 }
9423 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009424 kind = mirror::MethodHandle::Kind::kInstanceGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009425 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009426 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009427 num_params = 1;
9428 break;
9429 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009430 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson82b351f2017-07-05 14:34:25 +01009431 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009432 case DexFile::MethodHandleType::kInvokeConstructor:
Orion Hodson82b351f2017-07-05 14:34:25 +01009433 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009434 case DexFile::MethodHandleType::kInvokeInterface:
9435 UNREACHABLE();
Orion Hodsonc069a302017-01-18 09:23:12 +00009436 }
9437
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009438 ArtField* target_field =
9439 ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
9440 if (LIKELY(target_field != nullptr)) {
9441 ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
9442 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9443 if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
9444 ThrowIllegalAccessErrorField(referring_class, target_field);
9445 return nullptr;
9446 }
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009447 if (UNLIKELY(is_put && target_field->IsFinal())) {
9448 ThrowIllegalAccessErrorField(referring_class, target_field);
9449 return nullptr;
9450 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009451 } else {
9452 DCHECK(Thread::Current()->IsExceptionPending());
9453 return nullptr;
9454 }
9455
9456 StackHandleScope<4> hs(self);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009457 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Orion Hodsonc069a302017-01-18 09:23:12 +00009458 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9459 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009460 if (UNLIKELY(method_params == nullptr)) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009461 DCHECK(self->IsExceptionPending());
9462 return nullptr;
9463 }
9464
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009465 Handle<mirror::Class> constructor_class;
Orion Hodsonc069a302017-01-18 09:23:12 +00009466 Handle<mirror::Class> return_type;
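// The MethodType built below mirrors the field accessor's signature: setters are (T)V for
// static and (D, T)V for instance fields, getters are ()T and (D)T, where D is the declaring
// class and T is the field's type.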
9467 switch (handle_type) {
9468 case DexFile::MethodHandleType::kStaticPut: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009469 method_params->Set(0, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009470 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009471 break;
9472 }
9473 case DexFile::MethodHandleType::kStaticGet: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009474 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009475 break;
9476 }
9477 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson631827d2017-04-10 14:53:47 +01009478 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009479 method_params->Set(1, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009480 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009481 break;
9482 }
9483 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodson631827d2017-04-10 14:53:47 +01009484 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009485 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009486 break;
9487 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009488 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson631827d2017-04-10 14:53:47 +01009489 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009490 case DexFile::MethodHandleType::kInvokeConstructor:
9491 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodson631827d2017-04-10 14:53:47 +01009492 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009493 UNREACHABLE();
9494 }
9495
9496 for (int32_t i = 0; i < num_params; ++i) {
9497 if (UNLIKELY(method_params->Get(i) == nullptr)) {
9498 DCHECK(self->IsExceptionPending());
9499 return nullptr;
Orion Hodsonc069a302017-01-18 09:23:12 +00009500 }
9501 }
9502
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009503 if (UNLIKELY(return_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009504 DCHECK(self->IsExceptionPending());
9505 return nullptr;
9506 }
9507
9508 Handle<mirror::MethodType>
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009509 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9510 if (UNLIKELY(method_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009511 DCHECK(self->IsExceptionPending());
9512 return nullptr;
9513 }
Orion Hodson631827d2017-04-10 14:53:47 +01009514
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009515 uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
9516 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9517}
9518
Vladimir Marko5aead702019-03-27 11:00:36 +00009519ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009520 Thread* self,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009521 const dex::MethodHandleItem& method_handle,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009522 ArtMethod* referrer) {
9523 DexFile::MethodHandleType handle_type =
9524 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9525 mirror::MethodHandle::Kind kind;
9526 uint32_t receiver_count = 0;
9527 ArtMethod* target_method = nullptr;
9528 switch (handle_type) {
9529 case DexFile::MethodHandleType::kStaticPut:
9530 case DexFile::MethodHandleType::kStaticGet:
9531 case DexFile::MethodHandleType::kInstancePut:
9532 case DexFile::MethodHandleType::kInstanceGet:
9533 UNREACHABLE();
9534 case DexFile::MethodHandleType::kInvokeStatic: {
9535 kind = mirror::MethodHandle::Kind::kInvokeStatic;
9536 receiver_count = 0;
Vladimir Markoba118822017-06-12 15:41:56 +01009537 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9538 method_handle.field_or_method_idx_,
9539 referrer,
9540 InvokeType::kStatic);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009541 break;
9542 }
9543 case DexFile::MethodHandleType::kInvokeInstance: {
9544 kind = mirror::MethodHandle::Kind::kInvokeVirtual;
9545 receiver_count = 1;
Vladimir Markoba118822017-06-12 15:41:56 +01009546 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9547 method_handle.field_or_method_idx_,
9548 referrer,
9549 InvokeType::kVirtual);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009550 break;
9551 }
9552 case DexFile::MethodHandleType::kInvokeConstructor: {
9553 // Constructors are currently implemented as a transform. They
9554 // are special cased later in this method.
9555 kind = mirror::MethodHandle::Kind::kInvokeTransform;
9556 receiver_count = 0;
Vladimir Markoba118822017-06-12 15:41:56 +01009557 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9558 method_handle.field_or_method_idx_,
9559 referrer,
9560 InvokeType::kDirect);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009561 break;
9562 }
9563 case DexFile::MethodHandleType::kInvokeDirect: {
9564 kind = mirror::MethodHandle::Kind::kInvokeDirect;
9565 receiver_count = 1;
9566 StackHandleScope<2> hs(self);
9567 // A constant method handle with type kInvokeDirect can refer to
9568 // a method that is private or to a method in a super class. To
9569 // disambiguate the two options, we resolve the method ignoring
9570 // the invocation type to determine if the method is private. We
9571 // then resolve again specifying the intended invocation type to
9572 // force the appropriate checks.
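// (Summary: a private target keeps kInvokeDirect; otherwise we switch to kInvokeSuper and
// re-find the implementation through the referrer's declaring class below.)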
Vladimir Marko89011192017-12-11 13:45:05 +00009573 target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009574 hs.NewHandle(referrer->GetDexCache()),
9575 hs.NewHandle(referrer->GetClassLoader()));
9576 if (UNLIKELY(target_method == nullptr)) {
9577 break;
9578 }
9579
9580 if (target_method->IsPrivate()) {
9581 kind = mirror::MethodHandle::Kind::kInvokeDirect;
Vladimir Markoba118822017-06-12 15:41:56 +01009582 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9583 method_handle.field_or_method_idx_,
9584 referrer,
9585 InvokeType::kDirect);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009586 } else {
9587 kind = mirror::MethodHandle::Kind::kInvokeSuper;
Vladimir Markoba118822017-06-12 15:41:56 +01009588 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9589 method_handle.field_or_method_idx_,
9590 referrer,
9591 InvokeType::kSuper);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009592 if (UNLIKELY(target_method == nullptr)) {
9593 break;
9594 }
9595 // Find the method specified in the parent in referring class
9596 // so invoke-super invokes the method in the parent of the
9597 // referrer.
9598 target_method =
9599 referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
9600 kRuntimePointerSize);
9601 }
9602 break;
9603 }
9604 case DexFile::MethodHandleType::kInvokeInterface: {
9605 kind = mirror::MethodHandle::Kind::kInvokeInterface;
9606 receiver_count = 1;
Vladimir Markoba118822017-06-12 15:41:56 +01009607 target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9608 method_handle.field_or_method_idx_,
9609 referrer,
9610 InvokeType::kInterface);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009611 break;
9612 }
Orion Hodson631827d2017-04-10 14:53:47 +01009613 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009614
9615 if (UNLIKELY(target_method == nullptr)) {
9616 DCHECK(Thread::Current()->IsExceptionPending());
9617 return nullptr;
9618 }
9619
9620 ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
9621 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9622 uint32_t access_flags = target_method->GetAccessFlags();
9623 if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
9624 ThrowIllegalAccessErrorMethod(referring_class, target_method);
9625 return nullptr;
9626 }
9627
9628 // Calculate the number of parameters from the method shorty. We add the
9629 // receiver count (0 or 1) and deduct one for the return value.
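// E.g. a virtual method with shorty "VIL" (void return, int and reference arguments) yields
// 3 + 1 (receiver) - 1 (return) = 3 parameter types.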
9630 uint32_t shorty_length;
9631 target_method->GetShorty(&shorty_length);
9632 int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);
9633
Orion Hodsonecd58562018-09-24 11:27:33 +01009634 StackHandleScope<5> hs(self);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009635 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009636 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9637 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
9638 if (method_params.Get() == nullptr) {
9639 DCHECK(self->IsExceptionPending());
9640 return nullptr;
9641 }
9642
Orion Hodsonecd58562018-09-24 11:27:33 +01009643 const DexFile* dex_file = referrer->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009644 const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009645 int32_t index = 0;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009646 if (receiver_count != 0) {
Orion Hodsonecd58562018-09-24 11:27:33 +01009647 // Insert receiver. Use the class identified in the method handle rather than the declaring
9648 // class of the resolved method, which may be a super class or a default interface method
9649 // (b/115964401).
9650 ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
9651 // receiver_class should have been resolved when resolving the target method.
9652 DCHECK(receiver_class != nullptr);
9653 method_params->Set(index++, receiver_class);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009654 }
Orion Hodsonecd58562018-09-24 11:27:33 +01009655
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009656 const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
Orion Hodsonecd58562018-09-24 11:27:33 +01009657 DexFileParameterIterator it(*dex_file, proto_id);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009658 while (it.HasNext()) {
Orion Hodsonda1cdd02018-01-31 18:08:28 +00009659 DCHECK_LT(index, num_params);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009660 const dex::TypeIndex type_idx = it.GetTypeIdx();
Orion Hodsonecd58562018-09-24 11:27:33 +01009661 ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009662 if (nullptr == klass) {
9663 DCHECK(self->IsExceptionPending());
9664 return nullptr;
9665 }
9666 method_params->Set(index++, klass);
9667 it.Next();
9668 }
9669
Orion Hodsonecd58562018-09-24 11:27:33 +01009670 Handle<mirror::Class> return_type =
9671 hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009672 if (UNLIKELY(return_type.IsNull())) {
9673 DCHECK(self->IsExceptionPending());
9674 return nullptr;
9675 }
9676
9677 Handle<mirror::MethodType>
9678 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9679 if (UNLIKELY(method_type.IsNull())) {
9680 DCHECK(self->IsExceptionPending());
9681 return nullptr;
9682 }
9683
9684 if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
9685 Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
9686 Handle<mirror::MethodHandlesLookup> lookup =
9687 hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
9688 return lookup->FindConstructor(self, constructor_class, method_type);
9689 }
9690
9691 uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
9692 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9693}
9694
Vladimir Markoaf940202017-12-08 15:01:18 +00009695ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
9696 uint32_t method_handle_idx,
9697 ArtMethod* referrer)
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009698 REQUIRES_SHARED(Locks::mutator_lock_) {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009699 const DexFile* const dex_file = referrer->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009700 const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009701 switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
9702 case DexFile::MethodHandleType::kStaticPut:
9703 case DexFile::MethodHandleType::kStaticGet:
9704 case DexFile::MethodHandleType::kInstancePut:
9705 case DexFile::MethodHandleType::kInstanceGet:
9706 return ResolveMethodHandleForField(self, method_handle, referrer);
9707 case DexFile::MethodHandleType::kInvokeStatic:
9708 case DexFile::MethodHandleType::kInvokeInstance:
9709 case DexFile::MethodHandleType::kInvokeConstructor:
9710 case DexFile::MethodHandleType::kInvokeDirect:
9711 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonda1cdd02018-01-31 18:08:28 +00009712 return ResolveMethodHandleForMethod(self, method_handle, referrer);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009713 }
Orion Hodsonc069a302017-01-18 09:23:12 +00009714}
9715
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009716bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
9717 return (entry_point == GetQuickResolutionStub()) ||
9718 (quick_resolution_trampoline_ == entry_point);
9719}
9720
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009721bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
9722 return (entry_point == GetQuickToInterpreterBridge()) ||
9723 (quick_to_interpreter_bridge_trampoline_ == entry_point);
9724}
9725
9726bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
9727 return (entry_point == GetQuickGenericJniStub()) ||
9728 (quick_generic_jni_trampoline_ == entry_point);
9729}
9730
David Sehra49e0532017-08-25 08:05:29 -07009731bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
Vladimir Marko7dac8642019-11-06 17:09:30 +00009732 return entry_point == GetJniDlsymLookupStub() ||
9733 (jni_dlsym_lookup_trampoline_ == entry_point);
David Sehra49e0532017-08-25 08:05:29 -07009734}
9735
Vladimir Markofa458ac2020-02-12 14:08:07 +00009736bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
9737 return entry_point == GetJniDlsymLookupCriticalStub() ||
9738 (jni_dlsym_lookup_critical_trampoline_ == entry_point);
9739}
9740
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009741const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
9742 return GetQuickGenericJniStub();
9743}
9744
Mathieu Chartiere401d142015-04-22 13:56:20 -07009745void ClassLinker::SetEntryPointsToInterpreter(ArtMethod* method) const {
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009746 if (!method->IsNative()) {
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009747 method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
9748 } else {
Goran Jakovljevicc16268f2017-07-27 10:03:32 +02009749 method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009750 }
9751}
9752
Alex Lightdb01a092017-04-03 15:39:55 -07009753void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
9754 DCHECK(method->IsObsolete());
9755 // We cannot mess with the entrypoints of native methods because they are used to determine how
9756 // large the method's quick stack frame is. Without this information we cannot walk the stacks.
9757 if (!method->IsNative()) {
9758 method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
9759 }
9760}
9761
Ian Rogers7dfb28c2013-08-22 08:18:36 -07009762void ClassLinker::DumpForSigQuit(std::ostream& os) {
Mathieu Chartier6b069532015-08-05 15:08:12 -07009763 ScopedObjectAccess soa(Thread::Current());
Mathieu Chartier6b069532015-08-05 15:08:12 -07009764 ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009765 os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
9766 << NumNonZygoteClasses() << "\n";
Nicolas Geoffraya90c9222018-09-07 13:19:19 +01009767 ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
9768 os << "Dumping registered class loaders\n";
9769 size_t class_loader_index = 0;
9770 for (const ClassLoaderData& class_loader : class_loaders_) {
9771 ObjPtr<mirror::ClassLoader> loader =
9772 ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
9773 if (loader != nullptr) {
9774 os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
9775 bool saw_one_dex_file = false;
9776 for (const DexCacheData& dex_cache : dex_caches_) {
9777 if (dex_cache.IsValid() && dex_cache.class_table == class_loader.class_table) {
9778 if (saw_one_dex_file) {
9779 os << ":";
9780 }
9781 saw_one_dex_file = true;
9782 os << dex_cache.dex_file->GetLocation();
9783 }
9784 }
9785 os << "]";
9786 bool found_parent = false;
9787 if (loader->GetParent() != nullptr) {
9788 size_t parent_index = 0;
9789 for (const ClassLoaderData& class_loader2 : class_loaders_) {
9790 ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
9791 soa.Self()->DecodeJObject(class_loader2.weak_root));
9792 if (loader2 == loader->GetParent()) {
9793 os << ", parent #" << parent_index;
9794 found_parent = true;
9795 break;
9796 }
9797 parent_index++;
9798 }
9799 if (!found_parent) {
9800 os << ", unregistered parent of type "
9801 << loader->GetParent()->GetClass()->PrettyDescriptor();
9802 }
9803 } else {
9804 os << ", no parent";
9805 }
9806 os << "\n";
9807 }
9808 }
9809 os << "Done dumping class loaders\n";
Andreas Gampe9b7f8b52019-06-07 08:59:29 -07009810 Runtime* runtime = Runtime::Current();
9811 os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
9812 << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009813}
9814
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009815class CountClassesVisitor : public ClassLoaderVisitor {
9816 public:
9817 CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
9818
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009819 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01009820 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009821 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -07009822 if (class_table != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00009823 num_zygote_classes += class_table->NumZygoteClasses(class_loader);
9824 num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
Mathieu Chartier6b069532015-08-05 15:08:12 -07009825 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009826 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009827
9828 size_t num_zygote_classes;
9829 size_t num_non_zygote_classes;
9830};
9831
9832size_t ClassLinker::NumZygoteClasses() const {
9833 CountClassesVisitor visitor;
9834 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -07009835 return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009836}
9837
9838size_t ClassLinker::NumNonZygoteClasses() const {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07009839 CountClassesVisitor visitor;
9840 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -07009841 return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
Elliott Hughescac6cc72011-11-03 20:31:21 -07009842}
9843
Ian Rogers7dfb28c2013-08-22 08:18:36 -07009844size_t ClassLinker::NumLoadedClasses() {
Ian Rogers1bf8d4d2013-05-30 00:18:49 -07009845 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Mathieu Chartierc2e20622014-11-03 11:41:47 -08009846 // Only return non zygote classes since these are the ones which apps which care about.
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07009847 return NumNonZygoteClasses();
Elliott Hughese27955c2011-08-26 15:21:24 -07009848}
9849
Brian Carlstrom47d237a2011-10-18 15:08:33 -07009850pid_t ClassLinker::GetClassesLockOwner() {
Ian Rogersb726dcb2012-09-05 08:57:23 -07009851 return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
Brian Carlstrom47d237a2011-10-18 15:08:33 -07009852}
9853
9854pid_t ClassLinker::GetDexLockOwner() {
Andreas Gampecc1b5352016-12-01 16:58:38 -08009855 return Locks::dex_lock_->GetExclusiveOwnerTid();
Brian Carlstrom24a3c2e2011-10-17 18:07:52 -07009856}
9857
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009858void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08009859 DCHECK(!init_done_);
9860
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009861 DCHECK(klass != nullptr);
9862 DCHECK(klass->GetClassLoader() == nullptr);
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08009863
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -07009864 mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009865 DCHECK(class_roots != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01009866 DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
9867 int32_t index = static_cast<int32_t>(class_root);
9868 DCHECK(class_roots->Get(index) == nullptr);
9869 class_roots->Set<false>(index, klass);
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009870}
9871
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009872ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
9873 Thread* self,
9874 const std::vector<const DexFile*>& dex_files,
9875 Handle<mirror::Class> loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +00009876 Handle<mirror::ClassLoader> parent_loader,
9877 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries) {
Calin Juravle7865ac72017-06-28 11:03:12 -07009878
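// This hand-builds the managed object graph that the Java-side constructors would normally
// create: loader.pathList -> DexPathList.dexElements[] -> Element.dexFile -> DexFile.cookie,
// where the cookie is a long[] holding native DexFile pointers.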
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009879 StackHandleScope<5> hs(self);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009880
Mathieu Chartierc7853442015-03-27 14:35:38 -07009881 ArtField* dex_elements_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009882 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009883
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009884 Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009885 DCHECK(dex_elements_class != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009886 DCHECK(dex_elements_class->IsArrayClass());
Mathieu Chartierdaaf3262015-03-24 13:30:28 -07009887 Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
Mathieu Chartier3398c782016-09-30 10:27:43 -07009888 mirror::ObjectArray<mirror::Object>::Alloc(self,
9889 dex_elements_class.Get(),
9890 dex_files.size())));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009891 Handle<mirror::Class> h_dex_element_class =
9892 hs.NewHandle(dex_elements_class->GetComponentType());
9893
Mathieu Chartierc7853442015-03-27 14:35:38 -07009894 ArtField* element_file_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009895 jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009896 DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009897
Andreas Gampe08883de2016-11-08 13:20:52 -08009898 ArtField* cookie_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
Vladimir Marko208f6702017-12-08 12:00:50 +00009899 DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009900
Andreas Gampe08883de2016-11-08 13:20:52 -08009901 ArtField* file_name_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
Vladimir Marko208f6702017-12-08 12:00:50 +00009902 DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009903
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009904 // Fill the elements array.
9905 int32_t index = 0;
9906 for (const DexFile* dex_file : dex_files) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009907 StackHandleScope<4> hs2(self);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009908
Calin Juravle7865ac72017-06-28 11:03:12 -07009909 // CreateWellKnownClassLoader is only used by gtests and the compiler.
9910 // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
Mathieu Chartiere58991b2015-10-13 07:59:34 -07009911 Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
9912 self,
9913 kDexFileIndexStart + 1));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009914 DCHECK(h_long_array != nullptr);
Vladimir Marko78baed52018-10-11 10:44:58 +01009915 h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009916
Mathieu Chartier3738e982017-05-12 16:07:28 -07009917 // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
9918 // FinalizerReference which will never get cleaned up without a started runtime.
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009919 Handle<mirror::Object> h_dex_file = hs2.NewHandle(
Mathieu Chartierc7853442015-03-27 14:35:38 -07009920 cookie_field->GetDeclaringClass()->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009921 DCHECK(h_dex_file != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009922 cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009923
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009924 Handle<mirror::String> h_file_name = hs2.NewHandle(
9925 mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009926 DCHECK(h_file_name != nullptr);
Mathieu Chartierfbc31082016-01-24 11:59:56 -08009927 file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
9928
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009929 Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009930 DCHECK(h_element != nullptr);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009931 element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009932
9933 h_dex_elements->Set(index, h_element.Get());
9934 index++;
9935 }
9936 DCHECK_EQ(index, h_dex_elements->GetLength());
9937
9938 // Create DexPathList.
9939 Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
Mathieu Chartierc7853442015-03-27 14:35:38 -07009940 dex_elements_field->GetDeclaringClass()->AllocObject(self));
Andreas Gampefa4333d2017-02-14 11:10:34 -08009941 DCHECK(h_dex_path_list != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009942 // Set elements.
Mathieu Chartierc7853442015-03-27 14:35:38 -07009943 dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
Andreas Gampe473191c2017-12-28 16:55:31 -08009944 // Create an empty List for the "nativeLibraryDirectories" field, required for native tests.
9945 // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
9946 // elements.
9947 {
9948 ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
9949 FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
9950 DCHECK(native_lib_dirs != nullptr);
9951 ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
9952 DCHECK(list_class != nullptr);
9953 {
9954 StackHandleScope<1> h_list_scope(self);
9955 Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
9956 bool list_init = EnsureInitialized(self, h_list_class, true, true);
9957 DCHECK(list_init);
9958 list_class = h_list_class.Get();
9959 }
9960 ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
9961 // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
9962 // is fine for testing. While it violates a Java-code invariant (the elementData field is
9963 // normally never null), as long as one does not try to add elements, this will still
9964 // work.
9965 native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
9966 }
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009967
Calin Juravle7865ac72017-06-28 11:03:12 -07009968 // Create the class loader.
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009969 Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
9970 ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
Calin Juravle7865ac72017-06-28 11:03:12 -07009971 DCHECK(h_class_loader != nullptr);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009972 // Set DexPathList.
Mathieu Chartierc7853442015-03-27 14:35:38 -07009973 ArtField* path_list_field =
Andreas Gampe08883de2016-11-08 13:20:52 -08009974 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
Mathieu Chartierc7853442015-03-27 14:35:38 -07009975 DCHECK(path_list_field != nullptr);
Calin Juravle7865ac72017-06-28 11:03:12 -07009976 path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
Andreas Gampe81c6f8d2015-03-25 17:19:53 -07009977
9978 // Make a pretend boot-classpath.
9979 // TODO: Should we scan the image?
Mathieu Chartierc7853442015-03-27 14:35:38 -07009980 ArtField* const parent_field =
Vladimir Markoe300c4e2021-06-08 16:00:05 +01009981 jni::DecodeArtField(WellKnownClasses::java_lang_ClassLoader_parent);
Roland Levillainf39c9eb2015-05-26 15:02:07 +01009982 DCHECK(parent_field != nullptr);
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009983 if (parent_loader.Get() == nullptr) {
9984 ScopedObjectAccessUnchecked soa(self);
9985 ObjPtr<mirror::Object> boot_loader(soa.Decode<mirror::Class>(
9986 WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self));
9987 parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
9988 } else {
9989 parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
9990 }
Calin Juravle7865ac72017-06-28 11:03:12 -07009991
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +00009992 ArtField* shared_libraries_field =
9993 jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
9994 DCHECK(shared_libraries_field != nullptr);
9995 shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
9996
9997 return h_class_loader.Get();
9998}
9999
10000jobject ClassLinker::CreateWellKnownClassLoader(Thread* self,
10001 const std::vector<const DexFile*>& dex_files,
10002 jclass loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +000010003 jobject parent_loader,
10004 jobject shared_libraries) {
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010005 CHECK(self->GetJniEnv()->IsSameObject(loader_class,
10006 WellKnownClasses::dalvik_system_PathClassLoader) ||
10007 self->GetJniEnv()->IsSameObject(loader_class,
David Brazdil1a9ac532019-03-05 11:57:13 +000010008 WellKnownClasses::dalvik_system_DelegateLastClassLoader) ||
10009 self->GetJniEnv()->IsSameObject(loader_class,
10010 WellKnownClasses::dalvik_system_InMemoryDexClassLoader));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010011
10012 // SOAAlreadyRunnable is protected, and we need something to add a global reference.
10013 // We could move the jobject to the callers, but all call-sites do this...
10014 ScopedObjectAccessUnchecked soa(self);
10015
10016 // For now, create a libcore-level DexFile for each ART DexFile. This "explodes" multidex.
Nicolas Geoffraye1672732018-11-30 01:09:49 +000010017 StackHandleScope<4> hs(self);
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010018
10019 Handle<mirror::Class> h_loader_class =
10020 hs.NewHandle<mirror::Class>(soa.Decode<mirror::Class>(loader_class));
Nicolas Geoffraye1672732018-11-30 01:09:49 +000010021 Handle<mirror::ClassLoader> h_parent =
10022 hs.NewHandle<mirror::ClassLoader>(soa.Decode<mirror::ClassLoader>(parent_loader));
10023 Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries =
10024 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010025
10026 ObjPtr<mirror::ClassLoader> loader = CreateWellKnownClassLoader(
10027 self,
10028 dex_files,
10029 h_loader_class,
Nicolas Geoffraye1672732018-11-30 01:09:49 +000010030 h_parent,
10031 h_shared_libraries);
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010032
10033 // Make it a global ref and return.
10034 ScopedLocalRef<jobject> local_ref(
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +000010035 soa.Env(), soa.Env()->AddLocalReference<jobject>(loader));
Andreas Gampe81c6f8d2015-03-25 17:19:53 -070010036 return soa.Env()->NewGlobalRef(local_ref.get());
10037}
10038
Calin Juravle7865ac72017-06-28 11:03:12 -070010039jobject ClassLinker::CreatePathClassLoader(Thread* self,
10040 const std::vector<const DexFile*>& dex_files) {
10041 return CreateWellKnownClassLoader(self,
10042 dex_files,
10043 WellKnownClasses::dalvik_system_PathClassLoader,
10044 nullptr);
10045}
10046
void ClassLinker::DropFindArrayClassCache() {
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
  find_array_class_cache_next_victim_ = 0;
}

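// Visit every live class loader known to the class linker; weak roots that the GC has cleared
// decode to null and are skipped.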
void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
  Thread* const self = Thread::Current();
  for (const ClassLoaderData& data : class_loaders_) {
    // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
    ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
        self->DecodeJObject(data.weak_root));
    if (class_loader != nullptr) {
      visitor->Visit(class_loader);
    }
  }
}

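// Visit the LinearAlloc of each registered class loader; stop early if the visitor returns false.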
void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
  for (const ClassLoaderData& data : class_loaders_) {
    LinearAlloc* alloc = data.allocator;
    if (alloc != nullptr && !visitor->Visit(alloc)) {
      break;
    }
  }
}

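// Register the given dex file object as a strong root in the class loader's class table so the
// GC keeps it reachable for as long as the loader itself is.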
void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
                                               ObjPtr<mirror::ClassLoader> class_loader) {
  DCHECK(dex_file != nullptr);
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  ClassTable* const table = ClassTableForClassLoader(class_loader);
  DCHECK(table != nullptr);
  if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
    // It was not already inserted, so perform the write barrier to let the GC know the class
    // loader's class table was modified.
    WriteBarrier::ForEveryFieldWrite(class_loader);
  }
}

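// Drop bookkeeping for class loaders whose JNI weak global has been cleared by the GC: unlink
// the entries under classlinker_classes_lock_, then delete them outside the lock.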
void ClassLinker::CleanupClassLoaders() {
  Thread* const self = Thread::Current();
  std::vector<ClassLoaderData> to_delete;
  // Do the delete outside the lock to avoid a lock violation in the JIT code cache.
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
      const ClassLoaderData& data = *it;
      // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
      ObjPtr<mirror::ClassLoader> class_loader =
          ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
      if (class_loader != nullptr) {
        ++it;
      } else {
        VLOG(class_linker) << "Freeing class loader";
        to_delete.push_back(data);
        it = class_loaders_.erase(it);
      }
    }
  }
  for (ClassLoaderData& data : to_delete) {
    // CHA unloading analysis and SingleImplementation cleanups are required.
    DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
  }
}

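// Visitor that scans loaded classes for the one whose virtual-method slice contains the given
// (copied) method.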
class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
 public:
  FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
      : method_(method),
        pointer_size_(pointer_size) {}

  bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
    if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
      holder_ = klass;
    }
    // Return false to stop searching if holder_ is not null.
    return holder_ == nullptr;
  }

  ObjPtr<mirror::Class> holder_ = nullptr;
  const ArtMethod* const method_;
  const PointerSize pointer_size_;
};

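// Slow path: linearly scans every loaded class to find the one that holds this copied method.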
ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
  ScopedTrace trace(__FUNCTION__);  // Since this function is slow, have a trace to notify people.
  CHECK(method->IsCopied());
  FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
  VisitClasses(&visitor);
  return visitor.holder_;
}

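// Allocate an IfTable backed by an ObjectArray<Object>, with kMax slots per interface entry.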
ObjPtr<mirror::IfTable> ClassLinker::AllocIfTable(Thread* self, size_t ifcount) {
  return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
      mirror::IfTable::Alloc(self,
                             GetClassRoot<mirror::ObjectArray<mirror::Object>>(this),
                             ifcount * mirror::IfTable::kMax)));
}

bool ClassLinker::IsUpdatableBootClassPathDescriptor(const char* descriptor ATTRIBUTE_UNUSED) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}

bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}

bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}

bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}

void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}

// Instantiate ClassLinker::ResolveMethod.
template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
    uint32_t method_idx,
    Handle<mirror::DexCache> dex_cache,
    Handle<mirror::ClassLoader> class_loader,
    ArtMethod* referrer,
    InvokeType type);
template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
    uint32_t method_idx,
    Handle<mirror::DexCache> dex_cache,
    Handle<mirror::ClassLoader> class_loader,
    ArtMethod* referrer,
    InvokeType type);

// Instantiate ClassLinker::AllocClass.
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);

}  // namespace art