/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "heap_bitmap.h"

#include "logging.h"
#include "UniquePtr.h"
#include "utils.h"

namespace art {

SpaceBitmap* SpaceBitmap::Create(const std::string& name, byte* heap_begin, size_t heap_capacity) {
  CHECK(heap_begin != NULL);
  // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
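  // Each bit covers kAlignment bytes of heap, so one bitmap word covers
  // kAlignment * kBitsPerWord bytes; OffsetToIndex is assumed to map a byte offset to
  // the index of the bitmap word covering it (its definition is not in this file).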
  size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
  UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), NULL, bitmap_size, PROT_READ | PROT_WRITE));
  if (mem_map.get() == NULL) {
    LOG(ERROR) << "Failed to allocate bitmap " << name;
    return NULL;
  }
  word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
  return new SpaceBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
}

// Clean up any resources associated with the bitmap.
SpaceBitmap::~SpaceBitmap() {}

void SpaceBitmap::SetHeapLimit(uintptr_t new_end) {
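  // new_end must cover a whole number of bitmap words, hence the alignment requirement
  // below. Shrinking only reduces the logical bitmap size; the underlying MemMap keeps
  // its full reservation (see the disabled Trim call).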
  DCHECK(IsAligned<kBitsPerWord * kAlignment>(new_end));
  size_t new_size = OffsetToIndex(new_end - heap_begin_) * kWordSize;
  if (new_size < bitmap_size_) {
    bitmap_size_ = new_size;
  }
  // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
  // should be marked.
  // TODO: Fix this code, it is broken and causes rare heap corruption!
  // mem_map_->Trim(reinterpret_cast<byte*>(heap_begin_ + bitmap_size_));
}

// Fill the bitmap with zeroes. Returns the bitmap's memory to the
// system as a side-effect.
void SpaceBitmap::Clear() {
  if (bitmap_begin_ != NULL) {
    // This returns the memory to the system. Successive page faults
    // will return zeroed memory.
    int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
    if (result == -1) {
      PLOG(WARNING) << "madvise failed";
    }
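    // heap_end_ < heap_begin_ is the "empty bitmap" encoding checked by Walk() and
    // SweepWalk() before scanning.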
    heap_end_ = heap_begin_ - 1;
  }
}

// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
// even if a bit has not been set for it.
bool SpaceBitmap::HasAddress(const void* obj) const {
  // If obj < heap_begin_ then offset underflows to some very large value past the end of the bitmap.
  const uintptr_t offset = (uintptr_t)obj - heap_begin_;
  const size_t index = OffsetToIndex(offset);
  return index < bitmap_size_ / kWordSize;
}

// Visits set bits in address order. The callback is not permitted to
// change the bitmap bits or max during the traversal.
void SpaceBitmap::Walk(SpaceBitmap::Callback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  if (heap_end_ < heap_begin_) {
    return;  // Bitmap is empty.
  }
  uintptr_t end = OffsetToIndex(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
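      // The most significant bit of a word maps to the lowest address it covers, so
      // CLZ(w) finds the next object in address order; xor-ing with
      // kWordHighBitMask >> shift clears that bit before the next iteration.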
      while (w != 0) {
        const size_t shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w ^= static_cast<size_t>(kWordHighBitMask) >> shift;
      }
    }
  }
}

// Similar to Walk but the callback routine is permitted to change the bitmap bits and end during
// traversal. Used by the root marking scan exclusively.
//
// The callback is invoked with a finger argument. The finger is a pointer to an address not yet
// visited by the traversal. If the callback sets a bit for an address at or above the finger, this
// address will be visited by the traversal. If the callback sets a bit for an address below the
// finger, this address will not be visited (typically such an address would be placed on the
// marking stack).
void SpaceBitmap::ScanWalk(uintptr_t scan_begin, uintptr_t scan_end, ScanCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  CHECK_LE(scan_begin, scan_end);
  CHECK_GE(scan_begin, heap_begin_);

  // This function doesn't support unaligned boundaries yet.
  size_t begin_offset = scan_begin - heap_begin_;
  size_t end_offset = scan_end - heap_begin_;
  DCHECK((begin_offset / kAlignment) % kBitsPerWord == 0)
      << "scan begin " << reinterpret_cast<const void*>(scan_begin)
      << " with offset " << begin_offset
      << " not aligned to word boundary";
  DCHECK((end_offset / kAlignment) % kBitsPerWord == 0)
      << "scan end " << reinterpret_cast<const void*>(scan_end)
      << " with offset " << end_offset
      << " not aligned to word boundary";

  size_t start = OffsetToIndex(begin_offset);
  if (scan_end < heap_end_) {
    // The end of the range we're scanning is below the current bitmap high water mark
    // (heap_end_), so scan to that and don't recompute 'end' on each iteration.
    size_t end = OffsetToIndex(end_offset - 1);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(IndexToOffset(i + 1) + heap_begin_);
        while (w != 0) {
          const size_t shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w ^= static_cast<size_t>(kWordHighBitMask) >> shift;
        }
      }
    }
  } else {
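    // scan_end reaches or passes the current heap_end_, and the callback may raise
    // heap_end_ by setting new bits, so the loop bound is recomputed on every iteration.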
    size_t end = OffsetToIndex(heap_end_ - heap_begin_);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(IndexToOffset(i + 1) + heap_begin_);
        while (w != 0) {
          const size_t shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w ^= static_cast<size_t>(kWordHighBitMask) >> shift;
        }
      }
      // update 'end' in case callback modified bitmap
      end = OffsetToIndex(heap_end_ - heap_begin_);
    }
  }
}

// Walk through the bitmaps in increasing address order, and find the
// object pointers that correspond to garbage objects. Call
// <callback> zero or more times with lists of these object pointers.
//
// The callback is not permitted to increase the max of either bitmap.
void SpaceBitmap::SweepWalk(const SpaceBitmap& live_bitmap,
                            const SpaceBitmap& mark_bitmap,
                            uintptr_t sweep_begin, uintptr_t sweep_end,
                            SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != NULL);
  CHECK(mark_bitmap.bitmap_begin_ != NULL);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);
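  // Make sweep_end inclusive and clamp it to the live bitmap's high water mark so the
  // index loop below can use <=.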
  sweep_end = std::min(sweep_end - 1, live_bitmap.heap_end_);
  if (live_bitmap.heap_end_ < live_bitmap.heap_begin_) {
    // Easy case; both are obviously empty.
    // TODO: this should never happen
    return;
  }
  // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than an Object**?
  std::vector<Object*> pointer_buf(4 * kBitsPerWord);
  Object** pb = &pointer_buf[0];
  size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_);
  word* live = live_bitmap.bitmap_begin_;
  word* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
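    // Bits set in the live bitmap but clear in the mark bitmap identify garbage objects.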
    word garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      while (garbage != 0) {
        const size_t shift = CLZ(garbage);
        garbage ^= static_cast<size_t>(kWordHighBitMask) >> shift;
        *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
      }
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[pointer_buf.size() - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

}  // namespace art

// Support needed for in order traversal
#include "object.h"
#include "object_utils.h"

namespace art {

static void WalkFieldsInOrder(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
                              void* arg);

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
static void WalkInstanceFields(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
                               Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(GlobalSynchronization::mutator_lock_) {
  // Visit fields of parent classes first.
  Class* super = klass->GetSuperClass();
  if (super != NULL) {
    WalkInstanceFields(visited, callback, obj, super, arg);
  }
  // Walk instance fields
  ObjectArray<Field>* fields = klass->GetIFields();
  if (fields != NULL) {
    for (int32_t i = 0; i < fields->GetLength(); i++) {
      Field* field = fields->Get(i);
      FieldHelper fh(field);
      if (!fh.GetType()->IsPrimitive()) {
        Object* value = field->GetObj(obj);
        if (value != NULL) {
          WalkFieldsInOrder(visited, callback, value, arg);
        }
      }
    }
  }
}

// For an unvisited object, visit it then all its children found via fields.
static void WalkFieldsInOrder(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
                              void* arg)
    SHARED_LOCKS_REQUIRED(GlobalSynchronization::mutator_lock_) {
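  // The scratch 'visited' bitmap breaks cycles in the object graph and keeps each
  // reachable object from being reported more than once.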
  if (visited->Test(obj)) {
    return;
  }
  // visit the object itself
  (*callback)(obj, arg);
  visited->Set(obj);
  // Walk instance fields of all objects
  Class* klass = obj->GetClass();
  WalkInstanceFields(visited, callback, obj, klass, arg);
  // Walk static fields of a Class
  if (obj->IsClass()) {
    ObjectArray<Field>* fields = klass->GetSFields();
    if (fields != NULL) {
      for (int32_t i = 0; i < fields->GetLength(); i++) {
        Field* field = fields->Get(i);
        FieldHelper fh(field);
        if (!fh.GetType()->IsPrimitive()) {
          Object* value = field->GetObj(NULL);
          if (value != NULL) {
            WalkFieldsInOrder(visited, callback, value, arg);
          }
        }
      }
    }
  } else if (obj->IsObjectArray()) {
    // Walk elements of an object array
    ObjectArray<Object>* obj_array = obj->AsObjectArray<Object>();
    int32_t length = obj_array->GetLength();
    for (int32_t i = 0; i < length; i++) {
      Object* value = obj_array->Get(i);
      if (value != NULL) {
        WalkFieldsInOrder(visited, callback, value, arg);
      }
    }
  }
}

// Visits set bits with an in order traversal. The callback is not permitted to change the bitmap
// bits or max during the traversal.
void SpaceBitmap::InOrderWalk(SpaceBitmap::Callback* callback, void* arg) {
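  // The scratch bitmap covers the same heap range as this one: assuming IndexToOffset
  // inverts OffsetToIndex, IndexToOffset(bitmap_size_ / kWordSize) recovers the
  // rounded-up heap capacity that was passed to Create().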
  UniquePtr<SpaceBitmap> visited(Create("bitmap for in-order walk",
                                        reinterpret_cast<byte*>(heap_begin_),
                                        IndexToOffset(bitmap_size_ / kWordSize)));
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  uintptr_t end = OffsetToIndex(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      while (w != 0) {
        const size_t shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        WalkFieldsInOrder(visited.get(), callback, obj, arg);
        w ^= static_cast<size_t>(kWordHighBitMask) >> shift;
      }
    }
  }
}

}  // namespace art