arena_allocator.cc revision 75001934af9fa3f2538f564bb4073d711809f1ff
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"

namespace art {

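// Number of bytes reserved after each allocation as a red zone when running
// under a memory tool (Valgrind/ASan); see AllocWithMemoryTool() below.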
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

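// Human-readable names for each allocation kind; the order must match the
// ArenaAllocKind enum, which is checked by the static_assert in Dump().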
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

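// Out-of-line wrappers around the memory tool (Valgrind/ASan) annotation
// macros, keeping the macro usage confined to this translation unit.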
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
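  // calloc() zero-initializes the block; allocation relies on arena memory
  // being zeroed (see the DCHECK in AllocWithMemoryTool()).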
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

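// Give the arena's physical pages back to the kernel; the mapping itself
// remains valid and subsequent reads see zeroed memory.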
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

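// Only the head of the free list is examined; if it is too small, a new arena
// is allocated rather than searching the rest of the list.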
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Releasing pages via madvise() only works for MemMap-backed arenas,
    // not for malloc-backed ones.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

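// Splice the entire chain onto the front of the free list with a single lock
// acquisition. Under a memory tool, the used bytes are first marked as
// undefined so stale contents are not silently readable.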
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

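// Bytes used in the current arena are computed from the bump pointer because
// arena_head_->bytes_allocated_ is only updated lazily (see
// UpdateBytesAllocated()); older arenas in the chain have accurate counts.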
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

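// Typical usage, as a sketch (Alloc() and the default allocation kind live in
// the header; the constructor arguments here are illustrative):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false);
//   ArenaAllocator allocator(&pool);
//   void* mem = allocator.Alloc(64, kArenaAllocMisc);
//   // On destruction, the allocator returns its arena chain to the pool.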
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

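// The new arena is pushed onto the front of the chain, so arena_head_ always
// refers to the arena the bump pointer is currently allocating from.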
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

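// Fast path: check the active [begin_, end_) range first, then fall back to
// walking the whole arena chain.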
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
382      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
383  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
384}
385
386}  // namespace art
387