/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_sweep.h"

#include <climits>
#include <vector>

#include "class_loader.h"
#include "dex_cache.h"
#include "heap.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "logging.h"
#include "macros.h"
#include "mark_stack.h"
#include "monitor.h"
#include "object.h"
#include "runtime.h"
#include "space.h"
#include "timing_logger.h"
#include "thread.h"

namespace art {

void MarkSweep::Init() {
  mark_stack_ = MarkStack::Create();
  mark_bitmap_ = Heap::GetMarkBits();
  live_bitmap_ = Heap::GetLiveBits();

  // TODO: if concurrent, clear the card table.

  // TODO: if concurrent, enable card marking in compiler

  // TODO: check that the mark bitmap is entirely clear.
}

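// Sets the mark bit for the given object. Objects below the condemned boundary
// are expected to be marked already. When check_finger is true, newly marked
// objects below the finger are pushed on the mark stack so the bitmap scan does
// not miss them.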
inline void MarkSweep::MarkObject0(const Object* obj, bool check_finger) {
  DCHECK(obj != NULL);
  if (obj < condemned_) {
    DCHECK(IsMarked(obj));
    return;
  }
  bool is_marked = mark_bitmap_->Test(obj);
  // This object was not previously marked.
  if (!is_marked) {
    mark_bitmap_->Set(obj);
    if (check_finger && obj < finger_) {
      // The object must be pushed on to the mark stack.
      mark_stack_->Push(obj);
    }
  }
}

// Used to mark objects when recursing. Recursion is done by moving
// the finger across the bitmaps in address order and marking child
// objects. Any newly-marked objects whose addresses are lower than
// the finger won't be visited by the bitmap scan, so those objects
// need to be added to the mark stack.
inline void MarkSweep::MarkObject(const Object* obj) {
  if (obj != NULL) {
    MarkObject0(obj, true);
  }
}

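// Root-visiting callback passed to Runtime::VisitRoots: marks a single root.
// The finger is expected to be NULL at this point, so no finger check is done.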
void MarkSweep::MarkObjectVisitor(const Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  DCHECK(mark_sweep->finger_ == NULL);  // no point in checking the finger if it is NULL
  mark_sweep->MarkObject0(root, false);
}

// Marks all objects in the root set.
void MarkSweep::MarkRoots() {
  Runtime::Current()->VisitRoots(MarkObjectVisitor, this);
}

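// Card table callback for dirty image cards: marks the object found on the
// card and immediately scans its references.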
void MarkSweep::ScanImageRootVisitor(Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  DCHECK(mark_sweep->finger_ == NULL);  // no point in checking the finger if it is NULL
  mark_sweep->MarkObject0(root, false);
  mark_sweep->ScanObject(root);
}

// Marks all objects that are in images and have been touched by the mutator.
void MarkSweep::ScanDirtyImageRoots() {
  const std::vector<Space*>& spaces = Heap::GetSpaces();
  CardTable* card_table = Heap::GetCardTable();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsImageSpace()) {
      byte* begin = spaces[i]->Begin();
      byte* end = spaces[i]->End();
      card_table->Scan(begin, end, ScanImageRootVisitor, this);
    }
  }
}

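// Bitmap walk callback used for image spaces in debug builds: advances the
// finger and verifies that the object's alloc space references are marked.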
void MarkSweep::CheckBitmapCallback(Object* obj, void* finger, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
  mark_sweep->CheckObject(obj);
}

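// Bitmap walk callback for non-image spaces: advances the finger and scans the
// object's references.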
void MarkSweep::ScanBitmapCallback(Object* obj, void* finger, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
  mark_sweep->ScanObject(obj);
}

// Populates the mark stack based on the set of marked objects and
// recursively marks until the mark stack is emptied.
void MarkSweep::RecursiveMark() {
  // RecursiveMark will build the lists of known instances of the Reference classes.
  // See DelayReferenceReferent for details.
  CHECK(soft_reference_list_ == NULL);
  CHECK(weak_reference_list_ == NULL);
  CHECK(finalizer_reference_list_ == NULL);
  CHECK(phantom_reference_list_ == NULL);
  CHECK(cleared_reference_list_ == NULL);

  void* arg = reinterpret_cast<void*>(this);
  const std::vector<Space*>& spaces = Heap::GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
#ifndef NDEBUG
    uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
    uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
    if (!spaces[i]->IsImageSpace()) {
      mark_bitmap_->ScanWalk(begin, end, &MarkSweep::ScanBitmapCallback, arg);
    } else {
      mark_bitmap_->ScanWalk(begin, end, &MarkSweep::CheckBitmapCallback, arg);
    }
#else
    if (!spaces[i]->IsImageSpace()) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
      mark_bitmap_->ScanWalk(begin, end, &MarkSweep::ScanBitmapCallback, arg);
    }
#endif
  }
  finger_ = reinterpret_cast<Object*>(~0);
  // TODO: tune the frequency of emptying the mark stack
  ProcessMarkStack();
}

void MarkSweep::ReMarkRoots() {
  UNIMPLEMENTED(FATAL);
}

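// Clears JNI weak global references whose targets were not marked, replacing
// them with the cleared weak global sentinel.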
void MarkSweep::SweepJniWeakGlobals() {
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  MutexLock mu(vm->weak_globals_lock);
  IndirectReferenceTable* table = &vm->weak_globals;
  typedef IndirectReferenceTable::iterator It;  // TODO: C++0x auto
  for (It it = table->begin(), end = table->end(); it != end; ++it) {
    const Object** entry = *it;
    if (!IsMarked(*entry)) {
      *entry = kClearedJniWeakGlobal;
    }
  }
}

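// Sweeps the runtime's weak system structures: interned strings, monitors, and
// JNI weak globals whose referents are unmarked.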
void MarkSweep::SweepSystemWeaks() {
  Runtime::Current()->GetInternTable()->SweepInternTableWeaks(IsMarked, this);
  Runtime::Current()->GetMonitorList()->SweepMonitorList(IsMarked, this);
  SweepJniWeakGlobals();
}

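// Called by HeapBitmap::SweepWalk with a batch of unmarked objects: clears
// their live bits, returns them to the alloc space, and records the freed
// object and byte counts with the heap.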
void MarkSweep::SweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
  // TODO: lock heap if concurrent
  size_t freed_objects = num_ptrs;
  size_t freed_bytes = 0;
  AllocSpace* space = static_cast<AllocSpace*>(arg);
  // Use a bulk free that merges consecutive objects before freeing, or free per object?
  // Documentation suggests better free performance with merging, but this may be at the expense
  // of allocation.
  // TODO: investigate performance
  static const bool kUseFreeList = true;
  if (kUseFreeList) {
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
      Heap::GetLiveBits()->Clear(obj);
    }
    // AllocSpace::FreeList clears the value in ptrs, so perform after clearing the live bit.
    space->FreeList(num_ptrs, ptrs);
  } else {
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
      Heap::GetLiveBits()->Clear(obj);
      space->Free(obj);
    }
  }
  Heap::RecordFreeLocked(freed_objects, freed_bytes);
  // TODO: unlock heap if concurrent
}

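// Sweeps system weaks, then walks each non-image space freeing every object
// that is in the live bitmap but not in the mark bitmap.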
void MarkSweep::Sweep() {
  SweepSystemWeaks();

  const std::vector<Space*>& spaces = Heap::GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (!spaces[i]->IsImageSpace()) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
      void* arg = static_cast<void*>(spaces[i]);
      HeapBitmap::SweepWalk(*live_bitmap_, *mark_bitmap_, begin, end,
                            &MarkSweep::SweepCallback, arg);
    }
  }
}

// Scans instance fields.
inline void MarkSweep::ScanInstanceFields(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  ScanFields(obj, klass->GetReferenceInstanceOffsets(), false);
}

inline void MarkSweep::CheckInstanceFields(const Object* obj) {
  Class* klass = obj->GetClass();
  CheckFields(obj, klass->GetReferenceInstanceOffsets(), false);
}

// Scans static storage on a Class.
inline void MarkSweep::ScanStaticFields(const Class* klass) {
  DCHECK(klass != NULL);
  ScanFields(klass, klass->GetReferenceStaticOffsets(), true);
}

inline void MarkSweep::CheckStaticFields(const Class* klass) {
  CheckFields(klass, klass->GetReferenceStaticOffsets(), true);
}

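// Marks the objects referenced by the fields of obj. When the class provides a
// reference offset bitmap, only the flagged offsets are visited; otherwise the
// reference fields are walked via the class (and, for instance fields, its
// superclasses).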
inline void MarkSweep::ScanFields(const Object* obj, uint32_t ref_offsets, bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Mark the specified offsets.
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset byte_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject<const Object*>(byte_offset, false);
      MarkObject(ref);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        MemberOffset field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        MarkObject(ref);
      }
    }
  }
}

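// Debug check: if ref points into the alloc space, verify that it is marked.
// If it is not, log the unmarked reference and whether the holding object's
// card was dirtied.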
inline void MarkSweep::CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) {
  AllocSpace* alloc_space = Heap::GetAllocSpace();
  if (alloc_space->Contains(ref)) {
    bool is_marked = mark_bitmap_->Test(ref);
    if (!is_marked) {
      LOG(INFO) << *alloc_space;
      LOG(WARNING) << (is_static ? "Static ref'" : "Instance ref'") << PrettyTypeOf(ref)
                   << "' (" << (void*)ref << ") in '" << PrettyTypeOf(obj)
                   << "' (" << (void*)obj << ") at offset "
                   << (void*)offset.Int32Value() << " wasn't marked";
      bool obj_marked = Heap::GetCardTable()->IsDirty(obj);
      if (!obj_marked) {
        LOG(WARNING) << "Object '" << PrettyTypeOf(obj) << "' (" << (void*)obj
                     << ") contains references to the alloc space, but wasn't card marked";
      }
    }
  }
}

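// Verification counterpart of ScanFields: walks the same reference fields but
// checks each reference instead of marking it.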
inline void MarkSweep::CheckFields(const Object* obj, uint32_t ref_offsets, bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Check the specified offsets.
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
      CheckReference(obj, ref, field_offset, is_static);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        MemberOffset field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        CheckReference(obj, ref, field_offset, is_static);
      }
    }
  }
}

// Scans the header, static field references, and interface pointers
// of a class object.
inline void MarkSweep::ScanClass(const Object* obj) {
#ifndef NDEBUG
  ++class_count_;
#endif
  ScanInstanceFields(obj);
  ScanStaticFields(obj->AsClass());
}

inline void MarkSweep::CheckClass(const Object* obj) {
  CheckInstanceFields(obj);
  CheckStaticFields(obj->AsClass());
}

// Scans the header of all array objects. If the array object is
// specialized to a reference type, scans the array data as well.
inline void MarkSweep::ScanArray(const Object* obj) {
#ifndef NDEBUG
  ++array_count_;
#endif
  MarkObject(obj->GetClass());
  if (obj->IsObjectArray()) {
    const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
    for (int32_t i = 0; i < array->GetLength(); ++i) {
      const Object* element = array->GetWithoutChecks(i);
      MarkObject(element);
    }
  }
}

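// Verification counterpart of ScanArray: checks the class reference and, for
// object arrays, each element.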
inline void MarkSweep::CheckArray(const Object* obj) {
  CheckReference(obj, obj->GetClass(), Object::ClassOffset(), false);
  if (obj->IsObjectArray()) {
    const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
    for (int32_t i = 0; i < array->GetLength(); ++i) {
      const Object* element = array->GetWithoutChecks(i);
      CheckReference(obj, element, MemberOffset(i * sizeof(Object*) +
                                                Array::DataOffset().Int32Value()), false);
    }
  }
}

// Process the "referent" field in a java.lang.ref.Reference. If the
// referent has not yet been marked, put it on the appropriate list in
// the heap for later processing.
void MarkSweep::DelayReferenceReferent(Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  DCHECK(klass->IsReferenceClass());
  Object* pending = obj->GetFieldObject<Object*>(Heap::GetReferencePendingNextOffset(), false);
  Object* referent = Heap::GetReferenceReferent(obj);
  if (pending == NULL && referent != NULL && !IsMarked(referent)) {
    Object** list = NULL;
    if (klass->IsSoftReferenceClass()) {
      list = &soft_reference_list_;
    } else if (klass->IsWeakReferenceClass()) {
      list = &weak_reference_list_;
    } else if (klass->IsFinalizerReferenceClass()) {
      list = &finalizer_reference_list_;
    } else if (klass->IsPhantomReferenceClass()) {
      list = &phantom_reference_list_;
    }
    DCHECK(list != NULL) << PrettyClass(klass) << " " << std::hex << klass->GetAccessFlags();
    Heap::EnqueuePendingReference(obj, list);
  }
}

// Scans the header and field references of a data object. If the
// scanned object is a reference subclass, it is scheduled for later
// processing.
inline void MarkSweep::ScanOther(const Object* obj) {
#ifndef NDEBUG
  ++other_count_;
#endif
  ScanInstanceFields(obj);
  if (obj->GetClass()->IsReferenceClass()) {
    DelayReferenceReferent(const_cast<Object*>(obj));
  }
}

inline void MarkSweep::CheckOther(const Object* obj) {
  CheckInstanceFields(obj);
}

// Scans an object reference. Determines the type of the reference
// and dispatches to a specialized scanning routine.
inline void MarkSweep::ScanObject(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  DCHECK(IsMarked(obj));
  if (obj->IsClass()) {
    ScanClass(obj);
  } else if (obj->IsArrayInstance()) {
    ScanArray(obj);
  } else {
    ScanOther(obj);
  }
}

// Checks that all alloc space references held by the given object are marked.
inline void MarkSweep::CheckObject(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  DCHECK(IsMarked(obj));
  if (obj->IsClass()) {
    CheckClass(obj);
  } else if (obj->IsArrayInstance()) {
    CheckArray(obj);
  } else {
    CheckOther(obj);
  }
}

// Scan anything that's on the mark stack.
void MarkSweep::ProcessMarkStack() {
  Space* alloc_space = Heap::GetAllocSpace();
  while (!mark_stack_->IsEmpty()) {
    const Object* obj = mark_stack_->Pop();
    if (alloc_space->Contains(obj)) {
      ScanObject(obj);
    }
  }
}

void MarkSweep::ScanDirtyObjects() {
  ProcessMarkStack();
}

// Walks the reference list marking any references subject to the
// reference clearing policy. References with a black referent are
// removed from the list. References with white referents biased
// toward saving are blackened and also removed from the list.
void MarkSweep::PreserveSomeSoftReferences(Object** list) {
  DCHECK(list != NULL);
  Object* clear = NULL;
  size_t counter = 0;
  while (*list != NULL) {
    Object* ref = Heap::DequeuePendingReference(list);
    Object* referent = Heap::GetReferenceReferent(ref);
    if (referent == NULL) {
      // Referent was cleared by the user during marking.
      continue;
    }
    bool is_marked = IsMarked(referent);
    if (!is_marked && ((++counter) & 1)) {
      // Referent is white and biased toward saving, mark it.
      MarkObject(referent);
      is_marked = true;
    }
    if (!is_marked) {
      // Referent is white, queue it for clearing.
      Heap::EnqueuePendingReference(ref, &clear);
    }
  }
  *list = clear;
  // Restart the mark with the newly black references added to the
  // root set.
  ProcessMarkStack();
}

// Unlinks the reference list, clearing reference objects with white
// referents. Cleared references registered to a reference queue are
// scheduled for appending by the heap worker thread.
void MarkSweep::ClearWhiteReferences(Object** list) {
  DCHECK(list != NULL);
  while (*list != NULL) {
    Object* ref = Heap::DequeuePendingReference(list);
    Object* referent = Heap::GetReferenceReferent(ref);
    if (referent != NULL && !IsMarked(referent)) {
      // Referent is white, clear it.
      Heap::ClearReferenceReferent(ref);
      if (Heap::IsEnqueuable(ref)) {
        Heap::EnqueueReference(ref, &cleared_reference_list_);
      }
    }
  }
  DCHECK(*list == NULL);
}

// Enqueues finalizer references with white referents. White
// referents are blackened, moved to the zombie field, and the
// referent field is cleared.
void MarkSweep::EnqueueFinalizerReferences(Object** list) {
  DCHECK(list != NULL);
  MemberOffset zombie_offset = Heap::GetFinalizerReferenceZombieOffset();
  bool has_enqueued = false;
  while (*list != NULL) {
    Object* ref = Heap::DequeuePendingReference(list);
    Object* referent = Heap::GetReferenceReferent(ref);
    if (referent != NULL && !IsMarked(referent)) {
      MarkObject(referent);
      // If the referent is non-null the reference must be enqueuable.
      DCHECK(Heap::IsEnqueuable(ref));
      ref->SetFieldObject(zombie_offset, referent, false);
      Heap::ClearReferenceReferent(ref);
      Heap::EnqueueReference(ref, &cleared_reference_list_);
      has_enqueued = true;
    }
  }
  if (has_enqueued) {
    ProcessMarkStack();
  }
  DCHECK(*list == NULL);
}

// Process reference class instances and schedule finalizations.
void MarkSweep::ProcessReferences(Object** soft_references, bool clear_soft,
                                  Object** weak_references,
                                  Object** finalizer_references,
                                  Object** phantom_references) {
  DCHECK(soft_references != NULL);
  DCHECK(weak_references != NULL);
  DCHECK(finalizer_references != NULL);
  DCHECK(phantom_references != NULL);

  // Unless we are in the zygote or required to clear soft references
  // with white references, preserve some white referents.
  if (clear_soft) {
    PreserveSomeSoftReferences(soft_references);
  }

  // Clear all remaining soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Preserve all white objects with finalize methods and schedule
  // them for finalization.
  EnqueueFinalizerReferences(finalizer_references);

  // Clear all f-reachable soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Clear all phantom references with white referents.
  ClearWhiteReferences(phantom_references);

  // At this point all reference lists should be empty.
  DCHECK(*soft_references == NULL);
  DCHECK(*weak_references == NULL);
  DCHECK(*finalizer_references == NULL);
  DCHECK(*phantom_references == NULL);
}

MarkSweep::~MarkSweep() {
#ifndef NDEBUG
  VLOG(heap) << "MarkSweep scanned classes=" << class_count_ << " arrays=" << array_count_ << " other=" << other_count_;
#endif
  delete mark_stack_;
  mark_bitmap_->Clear();
}

}  // namespace art