Revert "Revert "Basic structural redefinition support""
This reverts commit 5a2301d897294ff4ee6de71f459dc2566dc3fa1a.
Bug: 134162467
Reason for revert: Relanding, as it is unclear whether the issue was caused by this topic.
Change-Id: Ib1d1cf2e9132e30c9649b760ae9ae2d8ceacf843
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index d8bcb02..646f73d 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -16,12 +16,15 @@
#include "art_method.h"
+#include <algorithm>
#include <cstddef>
#include "android-base/stringprintf.h"
#include "arch/context.h"
#include "art_method-inl.h"
+#include "base/enums.h"
+#include "base/stl_util.h"
#include "class_linker-inl.h"
#include "class_root.h"
#include "debugger.h"
@@ -106,26 +109,32 @@
}
ObjPtr<mirror::DexCache> ArtMethod::GetObsoleteDexCache() {
+ PointerSize pointer_size = kRuntimePointerSize;
DCHECK(!Runtime::Current()->IsAotCompiler()) << PrettyMethod();
DCHECK(IsObsolete());
ObjPtr<mirror::ClassExt> ext(GetDeclaringClass()->GetExtData());
- CHECK(!ext.IsNull());
- ObjPtr<mirror::PointerArray> obsolete_methods(ext->GetObsoleteMethods());
- CHECK(!obsolete_methods.IsNull());
- DCHECK(ext->GetObsoleteDexCaches() != nullptr);
- int32_t len = obsolete_methods->GetLength();
- DCHECK_EQ(len, ext->GetObsoleteDexCaches()->GetLength());
+ ObjPtr<mirror::PointerArray> obsolete_methods(ext.IsNull() ? nullptr : ext->GetObsoleteMethods());
+ int32_t len = (obsolete_methods.IsNull() ? 0 : obsolete_methods->GetLength());
+ DCHECK(len == 0 || len == ext->GetObsoleteDexCaches()->GetLength())
+ << "len=" << len << " ext->GetObsoleteDexCaches()=" << ext->GetObsoleteDexCaches();
// Using kRuntimePointerSize (instead of using the image's pointer size) is fine since images
// should never have obsolete methods in them so they should always be the same.
- PointerSize pointer_size = kRuntimePointerSize;
- DCHECK_EQ(kRuntimePointerSize, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
+ DCHECK_EQ(pointer_size, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
for (int32_t i = 0; i < len; i++) {
if (this == obsolete_methods->GetElementPtrSize<ArtMethod*>(i, pointer_size)) {
return ext->GetObsoleteDexCaches()->Get(i);
}
}
- LOG(FATAL) << "This method does not appear in the obsolete map of its class!";
- UNREACHABLE();
+ CHECK(GetDeclaringClass()->IsObsoleteObject())
+ << "This non-structurally obsolete method does not appear in the obsolete map of its class: "
+ << GetDeclaringClass()->PrettyClass() << " Searched " << len << " caches.";
+ CHECK_EQ(this,
+ std::clamp(this,
+ &(*GetDeclaringClass()->GetMethods(pointer_size).begin()),
+ &(*GetDeclaringClass()->GetMethods(pointer_size).end())))
+ << "class is marked as structurally obsolete method but not found in normal obsolete-map "
+ << "despite not being the original method pointer for " << GetDeclaringClass()->PrettyClass();
+ return GetDeclaringClass()->GetDexCache();
}
uint16_t ArtMethod::FindObsoleteDexClassDefIndex() {
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 9d9abe3..10b9da1 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -6146,7 +6146,7 @@
// Update CHA info based on whether we override methods.
// Have to do this before setting the class as resolved which allows
// instantiation of klass.
- if (cha_ != nullptr) {
+ if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
cha_->UpdateAfterLoadingOf(klass);
}
@@ -6177,7 +6177,7 @@
ObjectLock<mirror::Class> lock(self, h_new_class);
FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());
- {
+ if (LIKELY(descriptor != nullptr)) {
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
ClassTable* const table = InsertClassTableForClassLoader(class_loader);
@@ -6197,7 +6197,7 @@
// Update CHA info based on whether we override methods.
// Have to do this before setting the class as resolved which allows
// instantiation of klass.
- if (cha_ != nullptr) {
+ if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
cha_->UpdateAfterLoadingOf(h_new_class);
}
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index dd9f56f..792f7b7 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -20,6 +20,7 @@
#include <list>
#include <set>
#include <string>
+#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <utility>
@@ -479,6 +480,38 @@
LinearAlloc* allocator,
size_t length);
+ // Convenience AllocClass() overload that uses mirror::Class::InitializeClassVisitor
+ // for the class initialization and uses the `java_lang_Class` from class roots
+ // instead of an explicit argument.
+ ObjPtr<mirror::Class> AllocClass(Thread* self, uint32_t class_size)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
+
+ // Setup the classloader, class def index, type idx so that we can insert this class in the class
+ // table.
+ void SetupClass(const DexFile& dex_file,
+ const dex::ClassDef& dex_class_def,
+ Handle<mirror::Class> klass,
+ ObjPtr<mirror::ClassLoader> class_loader)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ void LoadClass(Thread* self,
+ const DexFile& dex_file,
+ const dex::ClassDef& dex_class_def,
+ Handle<mirror::Class> klass)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ // Link the class and place it into the class-table using the given descriptor. NB if the
+ // descriptor is null the class will not be placed in any class-table. This is useful for
+ // implementing obsolete classes and should not be used otherwise.
+ bool LinkClass(Thread* self,
+ const char* descriptor,
+ Handle<mirror::Class> klass,
+ Handle<mirror::ObjectArray<mirror::Class>> interfaces,
+ MutableHandle<mirror::Class>* h_new_class_out)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Locks::classlinker_classes_lock_);
+
ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
@@ -829,13 +862,6 @@
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
- // Convenience AllocClass() overload that uses mirror::Class::InitializeClassVisitor
- // for the class initialization and uses the `java_lang_Class` from class roots
- // instead of an explicit argument.
- ObjPtr<mirror::Class> AllocClass(Thread* self, uint32_t class_size)
- REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(!Roles::uninterruptible_);
-
// Allocate a primitive array class and store it in appropriate class root.
void AllocPrimitiveArrayClass(Thread* self,
ClassRoot primitive_root,
@@ -889,20 +915,6 @@
uint32_t SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
const dex::ClassDef& dex_class_def);
- // Setup the classloader, class def index, type idx so that we can insert this class in the class
- // table.
- void SetupClass(const DexFile& dex_file,
- const dex::ClassDef& dex_class_def,
- Handle<mirror::Class> klass,
- ObjPtr<mirror::ClassLoader> class_loader)
- REQUIRES_SHARED(Locks::mutator_lock_);
-
- void LoadClass(Thread* self,
- const DexFile& dex_file,
- const dex::ClassDef& dex_class_def,
- Handle<mirror::Class> klass)
- REQUIRES_SHARED(Locks::mutator_lock_);
-
void LoadField(const ClassAccessor::Field& field, Handle<mirror::Class> klass, ArtField* dst)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1049,14 +1061,6 @@
ObjPtr<mirror::Class> klass2)
REQUIRES_SHARED(Locks::mutator_lock_);
- bool LinkClass(Thread* self,
- const char* descriptor,
- Handle<mirror::Class> klass,
- Handle<mirror::ObjectArray<mirror::Class>> interfaces,
- MutableHandle<mirror::Class>* h_new_class_out)
- REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(!Locks::classlinker_classes_lock_);
-
bool LinkSuperClass(Handle<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_);
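As a rough illustration of why LinkClass() now takes a nullable descriptor: linking with a null descriptor produces a fully linked class that is deliberately never published in the ClassTable, which is what an obsolete copy of a structurally redefined class needs. A minimal standalone sketch of that idea (all names below are invented stand-ins, not ART types):

#include <cassert>
#include <map>
#include <string>

struct FakeClass { bool linked = false; };

std::map<std::string, FakeClass*> class_table;

bool LinkClass(const char* descriptor, FakeClass* klass) {
  klass->linked = true;                 // Always perform the linking work.
  if (descriptor != nullptr) {          // Only named classes become visible.
    class_table[descriptor] = klass;
  }
  return true;
}

int main() {
  FakeClass normal, obsolete;
  LinkClass("LMain;", &normal);
  LinkClass(nullptr, &obsolete);        // Linked, but never lookup-able.
  assert(class_table.count("LMain;") == 1);
  assert(class_table.size() == 1);
  assert(obsolete.linked);
  return 0;
}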
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 4377d7e..6299e3f 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -613,6 +613,7 @@
ClassExtOffsets() : CheckOffsets<mirror::ClassExt>(false, "Ldalvik/system/ClassExt;") {
addOffset(OFFSETOF_MEMBER(mirror::ClassExt, instance_jfield_ids_), "instanceJfieldIDs");
addOffset(OFFSETOF_MEMBER(mirror::ClassExt, jmethod_ids_), "jmethodIDs");
+ addOffset(OFFSETOF_MEMBER(mirror::ClassExt, obsolete_class_), "obsoleteClass");
addOffset(OFFSETOF_MEMBER(mirror::ClassExt, obsolete_dex_caches_), "obsoleteDexCaches");
addOffset(OFFSETOF_MEMBER(mirror::ClassExt, obsolete_methods_), "obsoleteMethods");
addOffset(OFFSETOF_MEMBER(mirror::ClassExt, original_dex_file_), "originalDexFile");
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 76065a3..ca48955 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -156,7 +156,8 @@
}
Instrumentation::Instrumentation()
- : instrumentation_stubs_installed_(false),
+ : current_force_deopt_id_(0),
+ instrumentation_stubs_installed_(false),
entry_exit_stubs_installed_(false),
interpreter_stubs_installed_(false),
interpret_only_(false),
@@ -282,16 +283,20 @@
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
-static void InstrumentationInstallStack(Thread* thread, void* arg)
+void InstrumentationInstallStack(Thread* thread, void* arg)
REQUIRES_SHARED(Locks::mutator_lock_) {
struct InstallStackVisitor final : public StackVisitor {
- InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
+ InstallStackVisitor(Thread* thread_in,
+ Context* context,
+ uintptr_t instrumentation_exit_pc,
+ uint64_t force_deopt_id)
: StackVisitor(thread_in, context, kInstrumentationStackWalk),
instrumentation_stack_(thread_in->GetInstrumentationStack()),
instrumentation_exit_pc_(instrumentation_exit_pc),
- reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
- last_return_pc_(0) {
- }
+ reached_existing_instrumentation_frames_(false),
+ instrumentation_stack_depth_(0),
+ last_return_pc_(0),
+ force_deopt_id_(force_deopt_id) {}
bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
ArtMethod* m = GetMethod();
@@ -308,7 +313,8 @@
m,
/*return_pc=*/ 0,
GetFrameId(),
- interpreter_frame);
+ interpreter_frame,
+ force_deopt_id_);
if (kVerboseInstrumentation) {
LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
}
@@ -375,7 +381,8 @@
m,
return_pc,
GetFrameId(), // A runtime method still gets a frame id.
- false);
+ false,
+ force_deopt_id_);
if (kVerboseInstrumentation) {
LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
}
@@ -409,6 +416,7 @@
bool reached_existing_instrumentation_frames_;
size_t instrumentation_stack_depth_;
uintptr_t last_return_pc_;
+ uint64_t force_deopt_id_;
};
if (kVerboseInstrumentation) {
std::string thread_name;
@@ -419,7 +427,8 @@
Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
std::unique_ptr<Context> context(Context::Create());
uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
- InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
+ InstallStackVisitor visitor(
+ thread, context.get(), instrumentation_exit_pc, instrumentation->current_force_deopt_id_);
visitor.WalkStack(true);
CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());
@@ -542,6 +551,17 @@
}
}
+void Instrumentation::DeoptimizeAllThreadFrames() {
+ Thread* self = Thread::Current();
+ MutexLock mu(self, *Locks::thread_list_lock_);
+ ThreadList* tl = Runtime::Current()->GetThreadList();
+ tl->ForEach([&](Thread* t) {
+ Locks::mutator_lock_->AssertExclusiveHeld(self);
+ InstrumentThreadStack(t);
+ });
+ current_force_deopt_id_++;
+}
+
static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
return (events & expected) != 0;
}
@@ -803,10 +823,24 @@
}
if (empty) {
MutexLock mu(self, *Locks::thread_list_lock_);
- Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
- // Only do this after restoring, as walking the stack when restoring will see
- // the instrumentation exit pc.
- instrumentation_stubs_installed_ = false;
+ bool no_remaining_deopts = true;
+ // Check that there are no other forced deoptimizations. Do it here so we only need to lock
+ // thread_list_lock once.
+ runtime->GetThreadList()->ForEach([&](Thread* t) {
+ no_remaining_deopts =
+ no_remaining_deopts && !t->IsForceInterpreter() &&
+ std::all_of(t->GetInstrumentationStack()->cbegin(),
+ t->GetInstrumentationStack()->cend(),
+ [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
+ return frame.force_deopt_id_ == current_force_deopt_id_;
+ });
+ });
+ if (no_remaining_deopts) {
+ Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
+ // Only do this after restoring, as walking the stack when restoring will see
+ // the instrumentation exit pc.
+ instrumentation_stubs_installed_ = false;
+ }
}
}
}
@@ -1401,8 +1435,8 @@
DCHECK(!self->IsExceptionPending());
size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);
- instrumentation::InstrumentationStackFrame instrumentation_frame(h_this.Get(), method, lr,
- frame_id, interpreter_entry);
+ instrumentation::InstrumentationStackFrame instrumentation_frame(
+ h_this.Get(), method, lr, frame_id, interpreter_entry, current_force_deopt_id_);
stack->push_front(instrumentation_frame);
}
@@ -1563,6 +1597,13 @@
bool deoptimize = (visitor.caller != nullptr) &&
(interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
self->IsForceInterpreter() ||
+ // NB Since structurally obsolete compiled methods might have the offsets of
+ // methods/fields compiled in, we need to go back to the interpreter whenever
+ // we hit them.
+ visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
+ // Check if we forced all threads to deoptimize in the time between this frame
+ // being created and now.
+ instrumentation_frame.force_deopt_id_ != current_force_deopt_id_ ||
Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
if (is_ref) {
// Restore the return value if it's a reference since it might have moved.
@@ -1628,7 +1669,8 @@
std::string InstrumentationStackFrame::Dump() const {
std::ostringstream os;
os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
- << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
+ << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_)
+ << " force_deopt_id=" << force_deopt_id_;
return os.str();
}
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index a7907c8..82e1a13 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -536,6 +536,10 @@
void InstrumentThreadStack(Thread* thread)
REQUIRES_SHARED(Locks::mutator_lock_);
+ // Force all currently running frames to be deoptimized back to interpreter. This should only be
+ // used in cases where basically all compiled code has been invalidated.
+ void DeoptimizeAllThreadFrames() REQUIRES(art::Locks::mutator_lock_);
+
static size_t ComputeFrameId(Thread* self,
size_t frame_depth,
size_t inlined_frames_before_frame)
@@ -643,6 +647,11 @@
return deoptimized_methods_lock_.get();
}
+ // A counter that is incremented every time DeoptimizeAllThreadFrames is called. We check each
+ // InstrumentationStackFrame's creation id against this number and, if they differ, we deopt even
+ // if we could otherwise continue running.
+ uint64_t current_force_deopt_id_ GUARDED_BY(Locks::mutator_lock_);
+
// Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
bool instrumentation_stubs_installed_;
@@ -746,6 +755,7 @@
friend class InstrumentationTest; // For GetCurrentInstrumentationLevel and ConfigureStubs.
friend class InstrumentationStackPopper; // For popping instrumentation frames.
+ friend void InstrumentationInstallStack(Thread*, void*);
DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
@@ -758,12 +768,14 @@
ArtMethod* method,
uintptr_t return_pc,
size_t frame_id,
- bool interpreter_entry)
+ bool interpreter_entry,
+ uint64_t force_deopt_id)
: this_object_(this_object),
method_(method),
return_pc_(return_pc),
frame_id_(frame_id),
- interpreter_entry_(interpreter_entry) {
+ interpreter_entry_(interpreter_entry),
+ force_deopt_id_(force_deopt_id) {
}
std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);
@@ -773,6 +785,7 @@
uintptr_t return_pc_;
size_t frame_id_;
bool interpreter_entry_;
+ uint64_t force_deopt_id_;
};
} // namespace instrumentation
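The force-deopt counter behaves like a generation number: each instrumentation frame records the counter value at creation, DeoptimizeAllThreadFrames() bumps the counter, and any frame that returns with a stale value is sent back to the interpreter. A minimal self-contained model of just that comparison (not ART code; all names are illustrative):

#include <cstdint>
#include <iostream>
#include <vector>

struct Frame {
  uint64_t force_deopt_id;  // Generation recorded when the frame was pushed.
};

struct Instrumentation {
  uint64_t current_force_deopt_id = 0;
  std::vector<Frame> stack;

  void PushFrame() { stack.push_back(Frame{current_force_deopt_id}); }

  // Analogous to DeoptimizeAllThreadFrames(): bump the generation so every
  // frame pushed before this point is considered stale.
  void DeoptimizeAllFrames() { ++current_force_deopt_id; }

  // Analogous to the check added on method exit: a frame created under an
  // older generation must go back to the interpreter.
  bool NeedsDeopt(const Frame& f) const {
    return f.force_deopt_id != current_force_deopt_id;
  }
};

int main() {
  Instrumentation instr;
  instr.PushFrame();                 // Created under generation 0.
  instr.DeoptimizeAllFrames();       // Generation becomes 1.
  instr.PushFrame();                 // Created under generation 1.
  std::cout << instr.NeedsDeopt(instr.stack[0]) << "\n";  // 1 (deopt)
  std::cout << instr.NeedsDeopt(instr.stack[1]) << "\n";  // 0 (keep running)
  return 0;
}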
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index c0342ba..ecfe9b6 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -1686,6 +1686,28 @@
}
}
+void JitCodeCache::InvalidateAllCompiledCode() {
+ art::MutexLock mu(Thread::Current(), *Locks::jit_lock_);
+ size_t cnt = profiling_infos_.size();
+ size_t osr_size = osr_code_map_.size();
+ for (ProfilingInfo* pi : profiling_infos_) {
+ // NB Due to OSR we might run this on some methods multiple times but this should be fine.
+ ArtMethod* meth = pi->GetMethod();
+ pi->SetSavedEntryPoint(nullptr);
+ // We had a ProfilingInfo so we must be warm.
+ ClearMethodCounter(meth, /*was_warm=*/true);
+ ClassLinker* linker = Runtime::Current()->GetClassLinker();
+ if (meth->IsObsolete()) {
+ linker->SetEntryPointsForObsoleteMethod(meth);
+ } else {
+ linker->SetEntryPointsToInterpreter(meth);
+ }
+ }
+ osr_code_map_.clear();
+ VLOG(jit) << "Invalidated the compiled code of " << (cnt - osr_size) << " methods and "
+ << osr_size << " OSRs.";
+}
+
void JitCodeCache::InvalidateCompiledCodeFor(ArtMethod* method,
const OatQuickMethodHeader* header) {
DCHECK(!method->IsNative());
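A rough model of what InvalidateAllCompiledCode() does: drop every saved entry point recorded in the profiling infos and empty the OSR map, so no method keeps running code compiled against the pre-redefinition layout. The types below are invented stand-ins for the real JIT code cache structures:

#include <map>
#include <vector>

struct FakeMethod {
  const void* entry_point = nullptr;   // nullptr == "use the interpreter".
};

struct FakeProfilingInfo {
  FakeMethod* method;
  const void* saved_entry_point;
};

struct FakeCodeCache {
  std::vector<FakeProfilingInfo> profiling_infos;
  std::map<FakeMethod*, const void*> osr_code_map;

  void InvalidateAllCompiledCode() {
    for (FakeProfilingInfo& pi : profiling_infos) {
      pi.saved_entry_point = nullptr;
      pi.method->entry_point = nullptr;  // Back to the interpreter bridge.
    }
    osr_code_map.clear();                // Forget all OSR-compiled code too.
  }
};

int main() {
  FakeMethod m;
  m.entry_point = &m;  // Pretend m has JIT-compiled code installed.
  FakeCodeCache cache;
  cache.profiling_infos.push_back({&m, m.entry_point});
  cache.osr_code_map[&m] = m.entry_point;
  cache.InvalidateAllCompiledCode();
  // m.entry_point is now nullptr and the OSR map is empty.
  return (cache.osr_code_map.empty() && m.entry_point == nullptr) ? 0 : 1;
}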
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 64607b6..154700f 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -274,6 +274,10 @@
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
+ void InvalidateAllCompiledCode()
+ REQUIRES(!Locks::jit_lock_)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
void InvalidateCompiledCodeFor(ArtMethod* method, const OatQuickMethodHeader* code)
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/jni/jni_id_manager.cc b/runtime/jni/jni_id_manager.cc
index 88c6ba1..9ae8c89 100644
--- a/runtime/jni/jni_id_manager.cc
+++ b/runtime/jni/jni_id_manager.cc
@@ -209,6 +209,7 @@
}
}
+// TODO need to fix races in here with visitors
template <typename ArtType> uintptr_t JniIdManager::EncodeGenericId(ArtType* t) {
Runtime* runtime = Runtime::Current();
JniIdType id_type = runtime->GetJniIdType();
@@ -309,6 +310,22 @@
return res;
}
+void JniIdManager::VisitIds(Thread* self, JniIdManager::IdVisitor* visitor) {
+ art::WriterMutexLock mu(self, *Locks::jni_id_lock_);
+ if (visitor->ShouldVisitFields()) {
+ for (auto it = field_id_map_.begin(); it != field_id_map_.end(); ++it) {
+ visitor->VisitFieldId(
+ reinterpret_cast<jfieldID>(IndexToId(std::distance(field_id_map_.begin(), it))), &*it);
+ }
+ }
+ if (visitor->ShouldVisitMethods()) {
+ for (auto it = method_id_map_.begin(); it != method_id_map_.end(); ++it) {
+ visitor->VisitMethodId(
+ reinterpret_cast<jmethodID>(IndexToId(std::distance(method_id_map_.begin(), it))), &*it);
+ }
+ }
+}
+
template <typename ArtType> ArtType* JniIdManager::DecodeGenericId(uintptr_t t) {
if (Runtime::Current()->GetJniIdType() == JniIdType::kIndices && (t % 2) == 1) {
ReaderMutexLock mu(Thread::Current(), *Locks::jni_id_lock_);
diff --git a/runtime/jni/jni_id_manager.h b/runtime/jni/jni_id_manager.h
index d294815..7b2f3c4 100644
--- a/runtime/jni/jni_id_manager.h
+++ b/runtime/jni/jni_id_manager.h
@@ -32,6 +32,22 @@
class ScopedEnableSuspendAllJniIdQueries;
class JniIdManager {
public:
+ class IdVisitor {
+ public:
+ virtual ~IdVisitor() {}
+ virtual void VisitMethodId(jmethodID id, ArtMethod** method) = 0;
+ virtual void VisitFieldId(jfieldID id, ArtField** field) = 0;
+ virtual bool ShouldVisitFields() = 0;
+ virtual bool ShouldVisitMethods() = 0;
+ };
+
+ template <typename T,
+ typename = typename std::enable_if<std::is_same_v<T, jmethodID> ||
+ std::is_same_v<T, jfieldID>>>
+ static constexpr bool IsIndexId(T val) {
+ return val == nullptr || reinterpret_cast<uintptr_t>(val) % 2 == 1;
+ }
+
ArtMethod* DecodeMethodId(jmethodID method) REQUIRES(!Locks::jni_id_lock_);
ArtField* DecodeFieldId(jfieldID field) REQUIRES(!Locks::jni_id_lock_);
jmethodID EncodeMethodId(ArtMethod* method) REQUIRES(!Locks::jni_id_lock_)
@@ -39,6 +55,34 @@
jfieldID EncodeFieldId(ArtField* field) REQUIRES(!Locks::jni_id_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
+ void VisitIds(Thread* self, IdVisitor* visitor);
+
+ template<typename MethodVisitor, typename FieldVisitor>
+ void VisitIds(Thread* self, MethodVisitor m, FieldVisitor f) REQUIRES(!Locks::jni_id_lock_) {
+ struct FuncVisitor : public IdVisitor {
+ public:
+ FuncVisitor(MethodVisitor m, FieldVisitor f) : m_(m), f_(f) {}
+ bool ShouldVisitFields() override {
+ return true;
+ }
+ bool ShouldVisitMethods() override {
+ return true;
+ }
+ void VisitMethodId(jmethodID mid, ArtMethod** am) NO_THREAD_SAFETY_ANALYSIS override {
+ m_(mid, am);
+ }
+ void VisitFieldId(jfieldID fid, ArtField** af) NO_THREAD_SAFETY_ANALYSIS override {
+ f_(fid, af);
+ }
+
+ private:
+ MethodVisitor m_;
+ FieldVisitor f_;
+ };
+ FuncVisitor fv(m, f);
+ VisitIds(self, &fv);
+ }
+
private:
template <typename ArtType>
uintptr_t EncodeGenericId(ArtType* t) REQUIRES(!Locks::jni_id_lock_)
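The templated VisitIds() overload is a convenience adapter: it wraps two callables in a concrete IdVisitor and forwards to the virtual entry point, so callers can pass lambdas instead of writing a visitor class. A standalone sketch of that adapter pattern (jmethodID/jfieldID and the manager are mocked; only the shape mirrors the real code):

#include <cstdint>
#include <iostream>

using FakeMethodId = uintptr_t;
using FakeFieldId = uintptr_t;

class IdVisitor {
 public:
  virtual ~IdVisitor() {}
  virtual void VisitMethodId(FakeMethodId id) = 0;
  virtual void VisitFieldId(FakeFieldId id) = 0;
};

void VisitIds(IdVisitor* visitor) {
  // A real manager would walk its method/field id maps here.
  visitor->VisitMethodId(1);
  visitor->VisitFieldId(3);
}

// Same shape as JniIdManager::VisitIds(Thread*, MethodVisitor, FieldVisitor):
// wrap the two callables in a concrete IdVisitor and call the virtual API.
template <typename MethodVisitor, typename FieldVisitor>
void VisitIds(MethodVisitor m, FieldVisitor f) {
  struct FuncVisitor : public IdVisitor {
    FuncVisitor(MethodVisitor m, FieldVisitor f) : m_(m), f_(f) {}
    void VisitMethodId(FakeMethodId id) override { m_(id); }
    void VisitFieldId(FakeFieldId id) override { f_(id); }
    MethodVisitor m_;
    FieldVisitor f_;
  };
  FuncVisitor fv(m, f);
  VisitIds(&fv);
}

int main() {
  VisitIds([](FakeMethodId id) { std::cout << "method id " << id << "\n"; },
           [](FakeFieldId id) { std::cout << "field id " << id << "\n"; });
  return 0;
}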
diff --git a/runtime/mirror/class-refvisitor-inl.h b/runtime/mirror/class-refvisitor-inl.h
index 263b774..8c85387 100644
--- a/runtime/mirror/class-refvisitor-inl.h
+++ b/runtime/mirror/class-refvisitor-inl.h
@@ -53,20 +53,14 @@
template<ReadBarrierOption kReadBarrierOption, class Visitor>
void Class::VisitNativeRoots(Visitor& visitor, PointerSize pointer_size) {
- for (ArtField& field : GetSFieldsUnchecked()) {
- // Visit roots first in case the declaring class gets moved.
- field.VisitRoots(visitor);
+ VisitFields<kReadBarrierOption>([&](ArtField* field) REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ field->VisitRoots(visitor);
if (kIsDebugBuild && IsResolved()) {
- CHECK_EQ(field.GetDeclaringClass<kReadBarrierOption>(), this) << GetStatus();
+ CHECK_EQ(field->GetDeclaringClass<kReadBarrierOption>(), this)
+ << GetStatus() << field->GetDeclaringClass()->PrettyClass() << " != " << PrettyClass();
}
- }
- for (ArtField& field : GetIFieldsUnchecked()) {
- // Visit roots first in case the declaring class gets moved.
- field.VisitRoots(visitor);
- if (kIsDebugBuild && IsResolved()) {
- CHECK_EQ(field.GetDeclaringClass<kReadBarrierOption>(), this) << GetStatus();
- }
- }
+ });
+ // Don't use VisitMethods because we don't want to hit the class-ext methods twice.
for (ArtMethod& method : GetMethods(pointer_size)) {
method.VisitRoots<kReadBarrierOption>(visitor, pointer_size);
}
@@ -76,6 +70,27 @@
}
}
+template<ReadBarrierOption kReadBarrierOption, class Visitor>
+void Class::VisitMethods(Visitor visitor, PointerSize pointer_size) {
+ for (ArtMethod& method : GetMethods(pointer_size)) {
+ visitor(&method);
+ }
+ ObjPtr<ClassExt> ext(GetExtData<kDefaultVerifyFlags, kReadBarrierOption>());
+ if (!ext.IsNull()) {
+ ext->VisitMethods<kReadBarrierOption, Visitor>(visitor, pointer_size);
+ }
+}
+
+template<ReadBarrierOption kReadBarrierOption, class Visitor>
+void Class::VisitFields(Visitor visitor) {
+ for (ArtField& sfield : GetSFieldsUnchecked()) {
+ visitor(&sfield);
+ }
+ for (ArtField& ifield : GetIFieldsUnchecked()) {
+ visitor(&ifield);
+ }
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 7dff9df..455f98d 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -1539,6 +1539,12 @@
std::string Class::PrettyClass() {
std::string result;
+ if (IsObsoleteObject()) {
+ result += "(Obsolete)";
+ }
+ if (IsRetired()) {
+ result += "(Retired)";
+ }
result += "java.lang.Class<";
result += PrettyDescriptor();
result += ">";
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 960f49c..6ed20ed 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -313,6 +313,17 @@
}
}
+ bool IsObsoleteObject() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return (GetAccessFlags() & kAccObsoleteObject) != 0;
+ }
+
+ void SetObsoleteObject() REQUIRES_SHARED(Locks::mutator_lock_) {
+ uint32_t flags = GetField32(OFFSET_OF_OBJECT_MEMBER(Class, access_flags_));
+ if ((flags & kAccObsoleteObject) == 0) {
+ SetAccessFlags(flags | kAccObsoleteObject);
+ }
+ }
+
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
bool IsTypeOfReferenceClass() REQUIRES_SHARED(Locks::mutator_lock_) {
return (GetClassFlags<kVerifyFlags>() & kClassFlagReference) != 0;
@@ -1110,6 +1121,15 @@
void VisitNativeRoots(Visitor& visitor, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ // Visit ArtMethods directly owned by this class.
+ template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, class Visitor>
+ void VisitMethods(Visitor visitor, PointerSize pointer_size)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ // Visit ArtFields directly owned by this class.
+ template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, class Visitor>
+ void VisitFields(Visitor visitor) REQUIRES_SHARED(Locks::mutator_lock_);
+
// Get one of the primitive classes.
static ObjPtr<mirror::Class> GetPrimitiveClass(ObjPtr<mirror::String> name)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/mirror/class_ext-inl.h b/runtime/mirror/class_ext-inl.h
index ead02ee..fd81a2a 100644
--- a/runtime/mirror/class_ext-inl.h
+++ b/runtime/mirror/class_ext-inl.h
@@ -21,8 +21,11 @@
#include "array-inl.h"
#include "art_method-inl.h"
+#include "base/enums.h"
#include "handle_scope.h"
+#include "mirror/object.h"
#include "object-inl.h"
+#include "verify_object.h"
namespace art {
namespace mirror {
@@ -89,6 +92,12 @@
}
template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline ObjPtr<Class> ClassExt::GetObsoleteClass() {
+ return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>(
+ OFFSET_OF_OBJECT_MEMBER(ClassExt, obsolete_class_));
+}
+
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline ObjPtr<PointerArray> ClassExt::GetJMethodIDs() {
return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(
OFFSET_OF_OBJECT_MEMBER(ClassExt, jmethod_ids_));
@@ -116,15 +125,58 @@
template<ReadBarrierOption kReadBarrierOption, class Visitor>
void ClassExt::VisitNativeRoots(Visitor& visitor, PointerSize pointer_size) {
+ VisitMethods<kReadBarrierOption>([&](ArtMethod* method) {
+ method->VisitRoots<kReadBarrierOption>(visitor, pointer_size);
+ }, pointer_size);
+}
+
+template<ReadBarrierOption kReadBarrierOption, class Visitor>
+void ClassExt::VisitMethods(Visitor visitor, PointerSize pointer_size) {
ObjPtr<PointerArray> arr(GetObsoleteMethods<kDefaultVerifyFlags, kReadBarrierOption>());
- if (arr.IsNull()) {
- return;
+ if (!arr.IsNull()) {
+ int32_t len = arr->GetLength();
+ for (int32_t i = 0; i < len; i++) {
+ ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, pointer_size);
+ if (method != nullptr) {
+ visitor(method);
+ }
+ }
}
- int32_t len = arr->GetLength();
- for (int32_t i = 0; i < len; i++) {
- ArtMethod* method = arr->GetElementPtrSize<ArtMethod*, kDefaultVerifyFlags>(i, pointer_size);
- if (method != nullptr) {
- method->VisitRoots<kReadBarrierOption>(visitor, pointer_size);
+}
+
+template<ReadBarrierOption kReadBarrierOption, class Visitor>
+void ClassExt::VisitJMethodIDs(Visitor v) {
+ ObjPtr<PointerArray> marr(GetJMethodIDs<kDefaultVerifyFlags, kReadBarrierOption>());
+ if (!marr.IsNull()) {
+ int32_t len = marr->GetLength();
+ for (int32_t i = 0; i < len; i++) {
+ jmethodID id = marr->GetElementPtrSize<jmethodID>(i, kRuntimePointerSize);
+ if (id != nullptr) {
+ v(id, i);
+ }
+ }
+ }
+}
+template<ReadBarrierOption kReadBarrierOption, class Visitor>
+void ClassExt::VisitJFieldIDs(Visitor v) {
+ ObjPtr<PointerArray> sarr(GetStaticJFieldIDs<kDefaultVerifyFlags, kReadBarrierOption>());
+ if (!sarr.IsNull()) {
+ int32_t len = sarr->GetLength();
+ for (int32_t i = 0; i < len; i++) {
+ jfieldID id = sarr->GetElementPtrSize<jfieldID>(i, kRuntimePointerSize);
+ if (id != nullptr) {
+ v(id, i, true);
+ }
+ }
+ }
+ ObjPtr<PointerArray> iarr(GetInstanceJFieldIDs<kDefaultVerifyFlags, kReadBarrierOption>());
+ if (!iarr.IsNull()) {
+ int32_t len = iarr->GetLength();
+ for (int32_t i = 0; i < len; i++) {
+ jfieldID id = iarr->GetElementPtrSize<jfieldID>(i, kRuntimePointerSize);
+ if (id != nullptr) {
+ v(id, i, false);
+ }
}
}
}
diff --git a/runtime/mirror/class_ext.cc b/runtime/mirror/class_ext.cc
index 4c6cb4d..27dcea8 100644
--- a/runtime/mirror/class_ext.cc
+++ b/runtime/mirror/class_ext.cc
@@ -25,6 +25,8 @@
#include "class_root.h"
#include "dex/dex_file-inl.h"
#include "gc/accounting/card_table-inl.h"
+#include "mirror/object.h"
+#include "mirror/object_array.h"
#include "object-inl.h"
#include "object_array-alloc-inl.h"
#include "object_array-inl.h"
@@ -101,6 +103,10 @@
return true;
}
+void ClassExt::SetObsoleteClass(ObjPtr<Class> klass) {
+ SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(ClassExt, obsolete_class_), klass);
+}
+
ObjPtr<ClassExt> ClassExt::Alloc(Thread* self) {
return ObjPtr<ClassExt>::DownCast(GetClassRoot<ClassExt>()->AllocObject(self));
}
diff --git a/runtime/mirror/class_ext.h b/runtime/mirror/class_ext.h
index 6fb225f..eb4047b 100644
--- a/runtime/mirror/class_ext.h
+++ b/runtime/mirror/class_ext.h
@@ -106,8 +106,26 @@
inline void VisitNativeRoots(Visitor& visitor, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, class Visitor>
+ inline void VisitMethods(Visitor visitor, PointerSize pointer_size)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
static ObjPtr<ClassExt> Alloc(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
+ // TODO Save the obsolete class, if we have one.
+ // TODO We need this so jit-cleanup can work. The obsolete class might get cleaned up early
+ // otherwise. We should remove the need for this.
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+ ObjPtr<Class> GetObsoleteClass() REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetObsoleteClass(ObjPtr<Class> classes) REQUIRES_SHARED(Locks::mutator_lock_);
+
+ template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
+ inline void VisitJFieldIDs(Visitor v) REQUIRES_SHARED(Locks::mutator_lock_);
+
+ template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
+ inline void VisitJMethodIDs(Visitor v) REQUIRES_SHARED(Locks::mutator_lock_);
+
private:
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
@@ -123,6 +141,9 @@
// the classes methods_ array or '0' if no id has been assigned to that method yet.
HeapReference<PointerArray> jmethod_ids_;
+ // If set this is the Class object that was being used before a structural redefinition occurred.
+ HeapReference<Class> obsolete_class_;
+
HeapReference<ObjectArray<DexCache>> obsolete_dex_caches_;
HeapReference<PointerArray> obsolete_methods_;
@@ -137,8 +158,8 @@
HeapReference<Object> verify_error_;
// Native pointer to DexFile and ClassDef index of this class before it was JVMTI-redefined.
- int32_t pre_redefine_class_def_index_;
int64_t pre_redefine_dex_file_ptr_;
+ int32_t pre_redefine_class_def_index_;
friend struct art::ClassExtOffsets; // for verifying offset information
DISALLOW_IMPLICIT_CONSTRUCTORS(ClassExt);
diff --git a/runtime/mirror/executable-inl.h b/runtime/mirror/executable-inl.h
index 6d4b46a..ce35d6e 100644
--- a/runtime/mirror/executable-inl.h
+++ b/runtime/mirror/executable-inl.h
@@ -20,6 +20,7 @@
#include "executable.h"
#include "object-inl.h"
+#include "verify_object.h"
namespace art {
namespace mirror {
@@ -36,6 +37,17 @@
return GetFieldObject<mirror::Class>(DeclaringClassOffset());
}
+template<typename Visitor, VerifyObjectFlags kVerifiyFlags>
+inline void Executable::VisitTarget(Visitor&& v) {
+ ArtMethod* orig = GetArtMethod<kVerifiyFlags>();
+ ArtMethod* new_target = v(orig);
+ if (orig != new_target) {
+ SetArtMethod(new_target);
+ SetDexMethodIndex(new_target->GetDexMethodIndex());
+ SetDeclaringClass(new_target->GetDeclaringClass());
+ }
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/executable.h b/runtime/mirror/executable.h
index a99c3ec..8eca206 100644
--- a/runtime/mirror/executable.h
+++ b/runtime/mirror/executable.h
@@ -41,6 +41,10 @@
return reinterpret_cast64<ArtMethod*>(GetField64<kVerifyFlags>(ArtMethodOffset()));
}
+ template <typename Visitor,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ inline void VisitTarget(Visitor&& v) REQUIRES(Locks::mutator_lock_);
+
template <bool kTransactionActive = false,
bool kCheckTransaction = true,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -61,6 +65,21 @@
uint32_t access_flags_;
uint32_t dex_method_index_;
+ template<bool kTransactionActive = false>
+ void SetDeclaringClass(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
+ SetFieldObject<kTransactionActive>(DeclaringClassOffset(), klass);
+ }
+
+ template<bool kTransactionActive = false>
+ void SetAccessFlags(uint32_t flags) REQUIRES_SHARED(Locks::mutator_lock_) {
+ SetField32<kTransactionActive>(AccessFlagsOffset(), flags);
+ }
+
+ template<bool kTransactionActive = false>
+ void SetDexMethodIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
+ SetField32<kTransactionActive>(DexMethodIndexOffset(), idx);
+ }
+
static MemberOffset DeclaringClassOffset() {
return MemberOffset(OFFSETOF_MEMBER(Executable, declaring_class_));
}
diff --git a/runtime/mirror/field-inl.h b/runtime/mirror/field-inl.h
index ac11be1..6e82d6d 100644
--- a/runtime/mirror/field-inl.h
+++ b/runtime/mirror/field-inl.h
@@ -104,6 +104,18 @@
SetFieldObject<kTransactionActive>(OFFSET_OF_OBJECT_MEMBER(Field, type_), type);
}
+template<typename Visitor>
+inline void Field::VisitTarget(Visitor&& v) {
+ ArtField* orig = GetArtField(/*use_dex_cache*/false);
+ ArtField* new_value = v(orig);
+ if (orig != new_value) {
+ SetDexFieldIndex<false>(new_value->GetDexFieldIndex());
+ SetOffset<false>(new_value->GetOffset().Int32Value());
+ SetDeclaringClass<false>(new_value->GetDeclaringClass());
+ }
+ DCHECK_EQ(new_value, GetArtField(/*use_dex_cache*/false));
+}
+
} // namespace mirror
} // namespace art
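Field, Executable, MethodHandle and FieldVarHandle all gain the same VisitTarget() shape: the reflective object hands its current native target to a visitor that may substitute a replacement (e.g. the field in the structurally redefined class), and it refreshes its cached data only when the target actually changed. A self-contained model of that pattern, using stand-in types rather than the real mirror classes:

#include <cstdint>
#include <iostream>

struct FakeArtField {     // Stand-in for ArtField.
  uint32_t dex_field_index;
  uint32_t offset;
};

class FakeFieldMirror {   // Stand-in for mirror::Field.
 public:
  explicit FakeFieldMirror(FakeArtField* f)
      : target_(f), dex_field_index_(f->dex_field_index), offset_(f->offset) {}

  template <typename Visitor>
  void VisitTarget(Visitor&& v) {
    FakeArtField* orig = target_;
    FakeArtField* new_value = v(orig);
    if (orig != new_value) {
      target_ = new_value;
      dex_field_index_ = new_value->dex_field_index;
      offset_ = new_value->offset;
    }
  }

  uint32_t offset() const { return offset_; }

 private:
  FakeArtField* target_;
  uint32_t dex_field_index_;
  uint32_t offset_;
};

int main() {
  FakeArtField old_field{7, 16};
  FakeArtField new_field{7, 24};  // Same field, new layout after redefinition.
  FakeFieldMirror mirror(&old_field);
  mirror.VisitTarget([&](FakeArtField* f) {
    return f == &old_field ? &new_field : f;  // Retarget obsolete pointers.
  });
  std::cout << mirror.offset() << "\n";  // Prints 24.
  return 0;
}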
diff --git a/runtime/mirror/field.cc b/runtime/mirror/field.cc
index f4d1e73..9a40006 100644
--- a/runtime/mirror/field.cc
+++ b/runtime/mirror/field.cc
@@ -24,7 +24,7 @@
namespace art {
namespace mirror {
-ArtField* Field::GetArtField() {
+ArtField* Field::GetArtField(bool use_dex_cache) {
ObjPtr<mirror::Class> declaring_class = GetDeclaringClass();
if (UNLIKELY(declaring_class->IsProxyClass())) {
DCHECK(IsStatic());
@@ -38,7 +38,9 @@
}
}
const ObjPtr<mirror::DexCache> dex_cache = declaring_class->GetDexCache();
- ArtField* art_field = dex_cache->GetResolvedField(GetDexFieldIndex(), kRuntimePointerSize);
+ ArtField* art_field = use_dex_cache
+ ? dex_cache->GetResolvedField(GetDexFieldIndex(), kRuntimePointerSize)
+ : nullptr;
if (UNLIKELY(art_field == nullptr)) {
if (IsStatic()) {
art_field = declaring_class->FindDeclaredStaticField(dex_cache, GetDexFieldIndex());
@@ -46,7 +48,9 @@
art_field = declaring_class->FindInstanceField(dex_cache, GetDexFieldIndex());
}
CHECK(art_field != nullptr);
- dex_cache->SetResolvedField(GetDexFieldIndex(), art_field, kRuntimePointerSize);
+ if (use_dex_cache) {
+ dex_cache->SetResolvedField(GetDexFieldIndex(), art_field, kRuntimePointerSize);
+ }
}
CHECK_EQ(declaring_class, art_field->GetDeclaringClass());
return art_field;
diff --git a/runtime/mirror/field.h b/runtime/mirror/field.h
index 6ba8dc6..89c86e3 100644
--- a/runtime/mirror/field.h
+++ b/runtime/mirror/field.h
@@ -68,8 +68,10 @@
return GetField32(OFFSET_OF_OBJECT_MEMBER(Field, offset_));
}
- // Slow, try to use only for PrettyField and such.
- ArtField* GetArtField() REQUIRES_SHARED(Locks::mutator_lock_);
+ // Slow, try to use only for PrettyField and such. Set use_dex_cache to false to avoid using the
+ // dex-cache when finding the art-field. This is useful for cases where the dex-cache might be
+ // temporarily invalid.
+ ArtField* GetArtField(bool use_dex_cache = true) REQUIRES_SHARED(Locks::mutator_lock_);
template <PointerSize kPointerSize, bool kTransactionActive = false>
static ObjPtr<mirror::Field> CreateFromArtField(Thread* self,
@@ -77,6 +79,12 @@
bool force_resolve)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
+
+ // Used to modify the target of this Field object, if required for structural redefinition or some
+ // other purpose.
+ template<typename Visitor>
+ inline void VisitTarget(Visitor&& v) REQUIRES(Locks::mutator_lock_);
+
private:
// Padding required for matching alignment with the Java peer.
uint8_t padding_[2];
diff --git a/runtime/mirror/method_handle_impl-inl.h b/runtime/mirror/method_handle_impl-inl.h
index 932b434..0085642 100644
--- a/runtime/mirror/method_handle_impl-inl.h
+++ b/runtime/mirror/method_handle_impl-inl.h
@@ -39,6 +39,20 @@
GetTargetMethod()->GetDeclaringClass() : GetTargetField()->GetDeclaringClass();
}
+template<typename Visitor>
+inline void MethodHandle::VisitTarget(Visitor&& v) {
+ void* target = GetTargetField();
+ void* result;
+ if (GetHandleKind() < kFirstAccessorKind) {
+ result = v(GetTargetMethod());
+ } else {
+ result = v(GetTargetField());
+ }
+ if (result != target) {
+ SetField64<false>(ArtFieldOrMethodOffset(), reinterpret_cast<uintptr_t>(result));
+ }
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/method_handle_impl.h b/runtime/mirror/method_handle_impl.h
index c973a24..357ec9d 100644
--- a/runtime/mirror/method_handle_impl.h
+++ b/runtime/mirror/method_handle_impl.h
@@ -87,6 +87,11 @@
// supported.
static const char* GetReturnTypeDescriptor(const char* invoke_method_name);
+ // Used when classes become structurally obsolete to change the MethodHandle to refer to the new
+ // method or field.
+ template<typename Visitor>
+ void VisitTarget(Visitor&& v) REQUIRES(Locks::mutator_lock_);
+
protected:
void Initialize(uintptr_t art_field_or_method, Kind kind, Handle<MethodType> method_type)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/mirror/var_handle-inl.h b/runtime/mirror/var_handle-inl.h
new file mode 100644
index 0000000..d3f582d
--- /dev/null
+++ b/runtime/mirror/var_handle-inl.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_MIRROR_VAR_HANDLE_INL_H_
+#define ART_RUNTIME_MIRROR_VAR_HANDLE_INL_H_
+
+#include "var_handle.h"
+
+namespace art {
+class ArtField;
+
+namespace mirror {
+
+template<typename Visitor>
+inline void FieldVarHandle::VisitTarget(Visitor&& v) {
+ ArtField* orig = GetField();
+ ArtField* new_value = v(orig);
+ if (orig != new_value) {
+ SetField64</*kTransactionActive*/ false>(ArtFieldOffset(),
+ reinterpret_cast<uintptr_t>(new_value));
+ }
+}
+
+} // namespace mirror
+} // namespace art
+
+#endif // ART_RUNTIME_MIRROR_VAR_HANDLE_INL_H_
diff --git a/runtime/mirror/var_handle.h b/runtime/mirror/var_handle.h
index a46b466..ac78d98 100644
--- a/runtime/mirror/var_handle.h
+++ b/runtime/mirror/var_handle.h
@@ -197,6 +197,10 @@
ArtField* GetField() REQUIRES_SHARED(Locks::mutator_lock_);
+ // Used for updating var-handles to obsolete fields.
+ template<typename Visitor>
+ inline void VisitTarget(Visitor&& v) REQUIRES(Locks::mutator_lock_);
+
private:
static MemberOffset ArtFieldOffset() {
return MemberOffset(OFFSETOF_MEMBER(FieldVarHandle, art_field_));
diff --git a/runtime/native/java_lang_Class.cc b/runtime/native/java_lang_Class.cc
index 4967f9e..4516d1b 100644
--- a/runtime/native/java_lang_Class.cc
+++ b/runtime/native/java_lang_Class.cc
@@ -218,6 +218,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass = hs.NewHandle(DecodeClass(soa, javaThis));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass()) {
StackHandleScope<1> hs2(soa.Self());
@@ -262,6 +266,10 @@
ObjPtr<mirror::Class> klass,
bool public_only,
bool force_resolve) REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (UNLIKELY(klass->IsObsoleteObject())) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
StackHandleScope<1> hs(self);
IterationRange<StrideIterator<ArtField>> ifields = klass->GetIFields();
IterationRange<StrideIterator<ArtField>> sfields = klass->GetSFields();
@@ -386,6 +394,10 @@
ObjPtr<mirror::Class> c,
ObjPtr<mirror::String> name)
REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (UNLIKELY(c->IsObsoleteObject())) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
ArtField* art_field = FindFieldByName(name, c->GetIFieldsPtr());
if (art_field != nullptr) {
return mirror::Field::CreateFromArtField<kRuntimePointerSize>(self, art_field, true);
@@ -404,6 +416,10 @@
DCHECK(name != nullptr);
DCHECK(self != nullptr);
+ if (UNLIKELY(clazz->IsObsoleteObject())) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
StackHandleScope<2> hs(self);
MutableHandle<mirror::Class> h_clazz(hs.NewHandle(clazz));
Handle<mirror::String> h_name(hs.NewHandle(name));
@@ -501,10 +517,15 @@
DCHECK(!Runtime::Current()->IsActiveTransaction());
StackHandleScope<1> hs(soa.Self());
+ ObjPtr<mirror::Class> klass = DecodeClass(soa, javaThis);
+ if (UNLIKELY(klass->IsObsoleteObject())) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
Handle<mirror::Constructor> result = hs.NewHandle(
mirror::Class::GetDeclaredConstructorInternal<kRuntimePointerSize, false>(
soa.Self(),
- DecodeClass(soa, javaThis),
+ klass,
soa.Decode<mirror::ObjectArray<mirror::Class>>(args)));
if (result == nullptr || ShouldDenyAccessToMember(result->GetArtMethod(), soa.Self())) {
return nullptr;
@@ -529,6 +550,10 @@
bool public_only = (publicOnly != JNI_FALSE);
hiddenapi::AccessContext hiddenapi_context = GetReflectionCaller(soa.Self());
Handle<mirror::Class> h_klass = hs.NewHandle(DecodeClass(soa, javaThis));
+ if (UNLIKELY(h_klass->IsObsoleteObject())) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
size_t constructor_count = 0;
// Two pass approach for speed.
for (auto& m : h_klass->GetDirectMethods(kRuntimePointerSize)) {
@@ -563,10 +588,15 @@
StackHandleScope<1> hs(soa.Self());
DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
DCHECK(!Runtime::Current()->IsActiveTransaction());
+ ObjPtr<mirror::Class> klass = DecodeClass(soa, javaThis);
+ if (UNLIKELY(klass->IsObsoleteObject())) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
Handle<mirror::Method> result = hs.NewHandle(
mirror::Class::GetDeclaredMethodInternal<kRuntimePointerSize, false>(
soa.Self(),
- DecodeClass(soa, javaThis),
+ klass,
soa.Decode<mirror::String>(name),
soa.Decode<mirror::ObjectArray<mirror::Class>>(args),
GetHiddenapiAccessContextFunction(soa.Self())));
@@ -585,6 +615,10 @@
bool public_only = (publicOnly != JNI_FALSE);
Handle<mirror::Class> klass = hs.NewHandle(DecodeClass(soa, javaThis));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
size_t num_methods = 0;
for (ArtMethod& m : klass->GetDeclaredMethods(kRuntimePointerSize)) {
uint32_t modifiers = m.GetAccessFlags();
@@ -623,6 +657,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<2> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
// Handle public contract to throw NPE if the "annotationClass" argument was null.
if (UNLIKELY(annotationClass == nullptr)) {
@@ -642,6 +680,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
// Return an empty array instead of a null pointer.
ObjPtr<mirror::Class> annotation_array_class =
@@ -659,6 +701,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
ObjPtr<mirror::ObjectArray<mirror::Class>> classes = nullptr;
if (!klass->IsProxyClass() && klass->GetDexCache() != nullptr) {
classes = annotations::GetDeclaredClasses(klass);
@@ -682,6 +728,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return nullptr;
}
@@ -692,6 +742,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return nullptr;
}
@@ -708,6 +762,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return nullptr;
}
@@ -724,6 +782,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return 0;
+ }
return mirror::Class::GetInnerClassFlags(klass, defaultValue);
}
@@ -731,6 +793,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return nullptr;
}
@@ -745,6 +811,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return nullptr;
}
@@ -756,6 +826,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return 0;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return false;
}
@@ -771,6 +845,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<2> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return false;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return false;
}
@@ -782,6 +860,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> klass(hs.NewHandle(DecodeClass(soa, javaThis)));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (klass->IsProxyClass() || klass->GetDexCache() == nullptr) {
return nullptr;
}
@@ -796,6 +878,10 @@
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<4> hs(soa.Self());
Handle<mirror::Class> klass = hs.NewHandle(DecodeClass(soa, javaThis));
+ if (klass->IsObsoleteObject()) {
+ ThrowRuntimeException("Obsolete Object!");
+ return nullptr;
+ }
if (UNLIKELY(klass->GetPrimitiveType() != 0 || klass->IsInterface() || klass->IsArrayClass() ||
klass->IsAbstract())) {
soa.Self()->ThrowNewExceptionF("Ljava/lang/InstantiationException;",