arena_allocator.cc revision ca76a1a0c1737b3e04961ba382c113102fdc00bf
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"

namespace art {

static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

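// These names label the per-kind totals printed by Dump() below and must stay
// in sync, in order and count, with the ArenaAllocKind enumeration in
// arena_allocator.h (the count is checked by a static_assert in Dump()).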
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment compensates for the current arena, whose
  // bytes_allocated_ may not have been updated yet.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
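
// Example of the output produced by Dump() above (numbers are illustrative):
//    MEM: used: 12345, allocated: 131072, lost: 118727
//   Number of arenas allocated: 1, Number of allocations: 42, avg size: 293
//   ===== Allocation by kind
//   Misc              1024
//   ...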

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

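// Out-of-line hooks for the memory tool. Roughly, in Valgrind terms: "defined"
// memory is addressable and initialized, "undefined" memory is addressable but
// reads of it are flagged, and "inaccessible" memory is poisoned so that any
// access is reported.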
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

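// Releasing a map-backed arena returns its physical pages to the kernel
// (madvise) while keeping the virtual mapping reserved; the pages come back
// zeroed, so the arena can be reused without an explicit memset.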
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

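// Resetting an arena zeroes the used portion in place and keeps the memory
// committed, so the next user sees zero-initialized storage.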
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

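// Hands out an arena of at least |size| bytes. Only the head of the free list
// is examined; if it is missing or too small, a new arena is allocated instead
// of searching the rest of the list.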
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Releasing pages only works for the map-backed arenas, not for malloc-backed ones.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

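// Takes ownership of a chain of arenas and splices the whole chain onto the
// head of the free list under the pool lock. Under a memory tool, the used
// portion of each arena is first marked undefined so that later reads of stale
// data are flagged.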
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

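// Bytes handed out so far: the bump-pointer offset in the current arena
// (ptr_ - begin_) plus the recorded usage of every previously filled arena in
// the chain.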
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

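// Illustrative usage sketch: how a client typically drives the pool and
// allocator defined in this file. The function name ExampleArenaUsage is
// hypothetical; Alloc() and kArenaAllocMisc come from arena_allocator.h.
#if 0
static void ExampleArenaUsage() {
  ArenaPool pool(/* use_malloc */ true, /* low_4gb */ false);
  ArenaAllocator allocator(&pool);
  // Bump-pointer allocation out of the current arena; memory is returned
  // zero-initialized and there is no per-object free.
  void* buffer = allocator.Alloc(64, kArenaAllocMisc);
  UNUSED(buffer);
  // ~ArenaAllocator() hands the whole arena chain back to the pool.
}
#endif
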
void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}
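
// Worked example of the rounding above (illustrative numbers): a request for
// 13 bytes reserves RoundUp(13 + kMemoryToolRedZoneBytes, 8) = 24 bytes from
// the arena; the first 13 bytes are marked defined for the caller and the
// remaining 11 bytes stay inaccessible, forming the red zone between
// allocations.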

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

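// Illustrative sketch: dumping allocator statistics through MemStats, using
// GetMemStats() and MemStats::Dump() defined above. DumpArenaAllocatorStats is
// a hypothetical caller.
#if 0
static void DumpArenaAllocatorStats(const ArenaAllocator& allocator, std::ostream& os) {
  MemStats stats = allocator.GetMemStats();
  stats.Dump(os);  // Prints the "MEM: used/allocated/lost" line and the per-kind totals.
}
#endif
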
}  // namespace art