arena_allocator.cc revision 25e0456b6ea13eba290b63ea88b6b7120ed89413
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"

namespace art {

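// Number of extra bytes reserved after every allocation as a red zone when
// running under a memory tool; see AllocWithMemoryTool() below.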
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

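// Human-readable tags for each ArenaAllocKind, padded to a fixed width so the
// per-kind table printed by Dump() lines up. The array length must match the
// ArenaAllocKind enum; Dump() verifies this with a static_assert.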
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

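// Accumulate the size of an allocation into the per-kind totals and bump the
// overall allocation count.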
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

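// Print a summary of arena usage: bytes handed out to callers ("used"), bytes
// acquired from the system ("allocated"), bytes acquired but never handed out
// ("lost"), and a per-kind breakdown of the allocations.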
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

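// Out-of-line forwarders to the memory tool annotation macros (Valgrind/ASan
// style client requests).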
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

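// Use calloc() so the backing memory starts out zeroed; the allocator hands
// out zeroed memory (see the DCHECK in AllocWithMemoryTool()).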
MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

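// Return the arena's dirty pages to the kernel and reset the bookkeeping.
// The virtual mapping itself stays reserved for reuse.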
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

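// Re-zero the used portion so the arena can be handed out again with its
// zeroed-memory guarantee intact.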
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

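// Hand out a recycled arena when the most recently freed one is large enough;
// otherwise allocate a fresh arena. Only the head of the free list is
// examined, so a large request may skip a suitable arena further down.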
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Releasing pages back to the kernel only works for MemMap-backed arenas.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

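// Note: this only counts arenas currently sitting in the free list, not
// arenas checked out by allocators.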
size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

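// Poison the returned memory when running under a memory tool, then splice the
// whole chain onto the front of the free list under a single lock acquisition.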
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

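// Bytes handed out so far: the bump-pointer offset into the current arena plus
// the recorded usage of every older arena in the chain.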
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
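  // Rounding up to a multiple of 8 keeps subsequent allocations 8-byte
  // aligned; the alignment padding is poisoned together with the red zone.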
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

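// Push a new arena onto the head of the chain and point the bump pointer at
// it. begin_/end_/ptr_ always describe the head arena; older arenas stay
// reachable through next_ with their usage recorded in bytes_allocated_.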
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

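// Fast path: check the currently active arena's range; otherwise walk the
// whole chain.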
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
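  // The head arena's bytes_allocated_ is refreshed only by
  // UpdateBytesAllocated(), so account for how far the live bump pointer has
  // moved relative to that recorded value.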
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art