blob: a36b0fbb9300782c8945677eba53af92585054c9 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
Ian Rogers6f3dbba2014-10-14 17:41:57 -070018#include <iomanip>
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019#include <numeric>
20
buzbee862a7602013-04-05 10:58:54 -070021#include "arena_allocator.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "logging.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070025#include "thread-inl.h"
Evgenii Stepanov1e133742015-05-20 12:30:59 -070026#include "base/memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070027
28namespace art {
29
// Number of extra bytes appended to each allocation as a red zone when running
// under a memory tool; see ArenaAllocator::AllocValgrind().
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-line definition for the constant declared in the header (needed when
// it is odr-used).
constexpr size_t Arena::kDefaultSize;

// Human-readable label for each ArenaAllocKind, printed by
// ArenaAllocatorStatsImpl<kCount>::Dump(). Order must match the
// ArenaAllocKind enum; Dump() static_asserts that the sizes agree.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc ",
  "BBList ",
  "BBPreds ",
  "DfsPreOrd ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd ",
  "Lowering ",
  "LIR ",
  "LIR masks ",
  "SwitchTbl ",
  "FillArray ",
  "SlowPaths ",
  "MIR ",
  "DataFlow ",
  "GrowList ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo ",
  "RegAlloc ",
  "Data ",
  "STL ",
  "Graph ",
  "BasicBlock ",
  "Predecessors ",
  "Successors ",
  "Dominated ",
  "Instruction ",
  "LoopInfo ",
  "TryCatchInf ",
  "UseListNode ",
  "Environment ",
  "MoveOperands ",
  "CodeBuffer ",
  "StackMaps ",
  "BaselineMaps ",
  "Optimization ",
};
74
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000075template <bool kCount>
76ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
77 : num_allocations_(0u) {
78 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
79}
80
81template <bool kCount>
82void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
83 num_allocations_ = other.num_allocations_;
84 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
85}
86
87template <bool kCount>
88void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
89 alloc_stats_[kind] += bytes;
90 ++num_allocations_;
91}
92
93template <bool kCount>
94size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
95 return num_allocations_;
96}
97
98template <bool kCount>
99size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
100 const size_t init = 0u; // Initial value of the correct type.
101 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
102}
103
104template <bool kCount>
105void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
106 ssize_t lost_bytes_adjustment) const {
107 size_t malloc_bytes = 0u;
108 size_t lost_bytes = 0u;
109 size_t num_arenas = 0u;
110 for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
111 malloc_bytes += arena->Size();
112 lost_bytes += arena->RemainingSpace();
113 ++num_arenas;
114 }
115 // The lost_bytes_adjustment is used to make up for the fact that the current arena
116 // may not have the bytes_allocated_ updated correctly.
117 lost_bytes += lost_bytes_adjustment;
118 const size_t bytes_allocated = BytesAllocated();
119 os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
120 << ", lost: " << lost_bytes << "\n";
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000121 size_t num_allocations = NumAllocations();
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000122 if (num_allocations != 0) {
123 os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
124 << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
125 }
126 os << "===== Allocation by kind\n";
Andreas Gampe785d2f22014-11-03 22:57:30 -0800127 static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000128 for (int i = 0; i < kNumArenaAllocKinds; i++) {
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000129 os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000130 }
131}
132
// Explicitly instantiate the used implementation so its member definitions
// above are emitted in this translation unit.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
135
// Base-class setup: nothing allocated and not linked into any chain.
// Subclass constructors (MallocArena/MemMapArena) set memory_ and size_.
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
138
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700139MallocArena::MallocArena(size_t size) {
140 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
141 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700142}
143
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700144MallocArena::~MallocArena() {
145 free(reinterpret_cast<void*>(memory_));
146}
147
// Arena backed by an anonymous read/write memory mapping. |low_4gb| is
// forwarded to MemMap::MapAnonymous — presumably requesting a mapping below
// 4GB (see ArenaPool's "low4gb must use map implementation" check).
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  // Record the actual mapped region; map_->Size() may differ from the
  // requested size (e.g. page rounding) — NOTE(review): confirm.
  memory_ = map_->Begin();
  size_ = map_->Size();
}
156
// Empty body: the mapping is destroyed by the std::unique_ptr<MemMap> member.
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}
160
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700161void MemMapArena::Release() {
162 if (bytes_allocated_ > 0) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700163 map_->MadviseDontNeedAndZero();
164 bytes_allocated_ = 0;
165 }
166}
167
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700168void Arena::Reset() {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700169 if (bytes_allocated_ > 0) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700170 memset(Begin(), 0, bytes_allocated_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700171 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700172 }
buzbee862a7602013-04-05 10:58:54 -0700173}
174
// Pool of reusable arenas. |use_malloc| selects MallocArena over MemMapArena;
// |low_4gb| is only meaningful for the map-backed implementation.
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  // low_4gb cannot be combined with the malloc-backed implementation.
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    // Make sure the MemMap subsystem is initialized before any arena is mapped.
    MemMap::Init();
  }
}
185
186ArenaPool::~ArenaPool() {
187 while (free_arenas_ != nullptr) {
188 auto* arena = free_arenas_;
189 free_arenas_ = free_arenas_->next_;
190 delete arena;
191 }
192}
193
194Arena* ArenaPool::AllocArena(size_t size) {
195 Thread* self = Thread::Current();
196 Arena* ret = nullptr;
197 {
198 MutexLock lock(self, lock_);
199 if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
200 ret = free_arenas_;
201 free_arenas_ = free_arenas_->next_;
202 }
203 }
204 if (ret == nullptr) {
Mathieu Chartierc7853442015-03-27 14:35:38 -0700205 ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
206 new MemMapArena(size, low_4gb_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700207 }
208 ret->Reset();
209 return ret;
210}
211
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700212void ArenaPool::TrimMaps() {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700213 if (!use_malloc_) {
214 // Doesn't work for malloc.
215 MutexLock lock(Thread::Current(), lock_);
216 for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
217 arena->Release();
218 }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700219 }
220}
221
Mathieu Chartier49285c52014-12-02 15:43:48 -0800222size_t ArenaPool::GetBytesAllocated() const {
223 size_t total = 0;
224 MutexLock lock(Thread::Current(), lock_);
225 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
226 total += arena->GetBytesAllocated();
227 }
228 return total;
229}
230
// Return a whole chain of arenas (linked through next_) to the free list in
// one lock acquisition. |first| may be null, in which case this is a no-op.
void ArenaPool::FreeArenaChain(Arena* first) {
  // Under a memory tool, mark the used portion of each arena undefined so
  // reads of recycled memory are reported.
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Find the tail of the chain so the whole chain can be spliced onto the
    // front of free_arenas_ with a single pointer swap.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
248
249size_t ArenaAllocator::BytesAllocated() const {
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000250 return ArenaAllocatorStats::BytesAllocated();
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700251}
252
Mathieu Chartierc7853442015-03-27 14:35:38 -0700253size_t ArenaAllocator::BytesUsed() const {
254 size_t total = ptr_ - begin_;
255 if (arena_head_ != nullptr) {
256 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
257 cur_arena = cur_arena->next_) {
258 total += cur_arena->GetBytesAllocated();
259 }
260 }
261 return total;
262}
263
// Starts with no arena: begin_/end_/ptr_ are null, so the first allocation
// request obtains an arena from |pool| (see ObtainNewArenaForAllocation).
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr),
      is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}
272
273void ArenaAllocator::UpdateBytesAllocated() {
274 if (arena_head_ != nullptr) {
275 // Update how many bytes we have allocated into the arena so that the arena pool knows how
276 // much memory to zero out.
277 arena_head_->bytes_allocated_ = ptr_ - begin_;
278 }
279}
280
// Memory-tool-aware allocation path: pads each allocation with a red zone,
// verifies the handed-out memory is zeroed, and poisons the padding so the
// tool reports buffer overruns. Returns nullptr if a new arena cannot be
// obtained.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Append the red zone and round up to 8-byte alignment.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Poison the red zone and alignment padding so the memory tool flags any
  // access past the |bytes| the caller asked for.
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
300
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the thread pool.
  // UpdateBytesAllocated() must run first so the head arena records how much
  // of it was used and the pool zeroes the right range on recycle.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
306
307void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
308 UpdateBytesAllocated();
309 Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
310 new_arena->next_ = arena_head_;
311 arena_head_ = new_arena;
312 // Update our internal data structures.
313 ptr_ = begin_ = new_arena->Begin();
314 end_ = new_arena->End();
315}
316
Mathieu Chartiere401d142015-04-22 13:56:20 -0700317bool ArenaAllocator::Contains(const void* ptr) const {
318 if (ptr >= begin_ && ptr < end_) {
319 return true;
320 }
321 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
322 if (cur_arena->Contains(ptr)) {
323 return true;
324 }
325 }
326 return false;
327}
328
// Bundles everything needed to print allocator statistics later: a label,
// the stats object, the arena chain, and the head-arena correction (see
// ArenaAllocator::GetMemStats for how the adjustment is computed).
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
336
// Print a header with the owner's name, then delegate to the stats object.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
341
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700342// Dump memory usage stats.
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000343MemStats ArenaAllocator::GetMemStats() const {
344 ssize_t lost_bytes_adjustment =
345 (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
346 return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
buzbee862a7602013-04-05 10:58:54 -0700347}
348
349} // namespace art