Refactor assertions in ConcurrentCopying::ProcessMarkStackRef.

Replace the hand-rolled kIsDebugBuild checks, the space-lookup lambda and
the LOG(FATAL_WITHOUT_ABORT)/LOG(FATAL) sequences with single DCHECKs that
stream the same diagnostics, and factor the space lookup into a new helper,
Heap::DumpSpaceNameFromAddress, built on Heap::FindSpaceFromAddress.

Test: mmma art
Bug: 116087961
Change-Id: I420929bbb820582f09e45bfb9d4b09a054e8879d
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 1c9d051..07d1e0f 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -1651,36 +1651,15 @@
inline void ConcurrentCopying::ProcessMarkStackRef(mirror::Object* to_ref) {
DCHECK(!region_space_->IsInFromSpace(to_ref));
space::RegionSpace::RegionType rtype = region_space_->GetRegionType(to_ref);
- auto find_space_from_ref = [this] (mirror::Object* ref)
- REQUIRES_SHARED(Locks::mutator_lock_) -> space::Space* {
- for (const auto& space : heap_->GetContinuousSpaces()) {
- if (space->Contains(ref)) {
- return space;
- }
- }
- for (const auto& space : heap_->GetDiscontinuousSpaces()) {
- if (space->Contains(ref)) {
- return space;
- }
- }
- return nullptr;
- };
- if (kUseBakerReadBarrier &&
- kIsDebugBuild &&
- to_ref->GetReadBarrierState() != ReadBarrier::GrayState()) {
- space::Space* space = find_space_from_ref(to_ref);
- LOG(FATAL_WITHOUT_ABORT) << " " << to_ref
- << " " << to_ref->GetReadBarrierState()
- << " is_marked=" << IsMarked(to_ref)
- << " type=" << to_ref->PrettyTypeOf()
- << " is_young_gc=" << young_gen_;
- if (space == region_space_) {
- LOG(FATAL) << " region_type=" << rtype;
- } else if (space != nullptr) {
- LOG(FATAL) << " space=" << space->GetName();
- } else {
- LOG(FATAL) << "no space";
- }
+ if (kUseBakerReadBarrier) {
+ DCHECK(to_ref->GetReadBarrierState() == ReadBarrier::GrayState())
+ << " to_ref=" << to_ref
+ << " rb_state=" << to_ref->GetReadBarrierState()
+ << " is_marked=" << IsMarked(to_ref)
+ << " type=" << to_ref->PrettyTypeOf()
+ << " young_gen=" << std::boolalpha << young_gen_ << std::noboolalpha
+ << " space=" << heap_->DumpSpaceNameFromAddress(to_ref)
+ << " region_type=" << rtype;
}
bool add_to_live_bytes = false;
// Invariant: There should be no object from a newly-allocated
@@ -1716,22 +1695,15 @@
Scan<false>(to_ref);
}
}
- if (kUseBakerReadBarrier &&
- kIsDebugBuild &&
- to_ref->GetReadBarrierState() != ReadBarrier::GrayState()) {
- space::Space* space = find_space_from_ref(to_ref);
- LOG(FATAL_WITHOUT_ABORT) << " " << to_ref
- << " " << to_ref->GetReadBarrierState()
- << " is_marked=" << IsMarked(to_ref)
- << " type=" << to_ref->PrettyTypeOf()
- << " is_young_gc=" << young_gen_;
- if (space == region_space_) {
- LOG(FATAL) << " region_type=" << rtype;
- } else if (space != nullptr) {
- LOG(FATAL) << " space=" << space->GetName();
- } else {
- LOG(FATAL) << "no space";
- }
+ if (kUseBakerReadBarrier) {
+ DCHECK(to_ref->GetReadBarrierState() == ReadBarrier::GrayState())
+ << " to_ref=" << to_ref
+ << " rb_state=" << to_ref->GetReadBarrierState()
+ << " is_marked=" << IsMarked(to_ref)
+ << " type=" << to_ref->PrettyTypeOf()
+ << " young_gen=" << std::boolalpha << young_gen_ << std::noboolalpha
+ << " space=" << heap_->DumpSpaceNameFromAddress(to_ref)
+ << " region_type=" << rtype;
}
#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
mirror::Object* referent = nullptr;
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index a31cbe7..a77b3fb 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1270,6 +1270,10 @@
return nullptr;
}
+std::string Heap::DumpSpaceNameFromAddress(const void* addr) const {
+ space::Space* space = FindSpaceFromAddress(addr);
+ return (space != nullptr) ? space->GetName() : "no space";
+}
void Heap::ThrowOutOfMemoryError(Thread* self, size_t byte_count, AllocatorType allocator_type) {
// If we're in a stack overflow, do not create a new exception. It would require running the
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 6c4b936..c3ee526 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -554,6 +554,9 @@
space::Space* FindSpaceFromAddress(const void* ptr) const
REQUIRES_SHARED(Locks::mutator_lock_);
+ std::string DumpSpaceNameFromAddress(const void* addr) const
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
void DumpForSigQuit(std::ostream& os) REQUIRES(!*gc_complete_lock_);
// Do a pending collector transition.
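
For context on why the streamed diagnostics above are free when the assertion
holds: a DCHECK only evaluates its streamed operands when the condition fails,
and it is a no-op in non-debug builds, which is presumably why the explicit
kIsDebugBuild guard could be dropped. Below is a minimal, self-contained sketch
of that lazy-streaming pattern; MY_DCHECK and FatalStream are made-up names for
illustration, not ART's actual DCHECK/LOG machinery.

  #include <cstdlib>
  #include <iostream>
  #include <sstream>

  // Collects a message and aborts when destroyed, i.e. at the end of the
  // full expression that built the message.
  class FatalStream {
   public:
    explicit FatalStream(const char* expr) { stream_ << "Check failed: " << expr; }
    ~FatalStream() {
      std::cerr << stream_.str() << std::endl;
      std::abort();
    }
    std::ostringstream& stream() { return stream_; }
   private:
    std::ostringstream stream_;
  };

  // If the condition holds, the else-branch (and every operand streamed after
  // the macro) is never evaluated; if it fails, the operands are appended and
  // the temporary's destructor aborts with the full message. (Sketch only:
  // real implementations also guard against dangling-else pitfalls.)
  #define MY_DCHECK(cond) \
    if (cond) {}          \
    else FatalStream(#cond).stream()

  int main() {
    int rb_state = 1;  // stand-in for to_ref->GetReadBarrierState()
    MY_DCHECK(rb_state == 1) << " rb_state=" << rb_state;  // passes: no message built
    return 0;
  }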