arena_allocator.cc revision 5f9da555e69e7a2e1b4cadd87bbdfcce54c1395d
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include "systrace.h"

namespace art {

static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

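// Clang 4 and later can warn (-Winstantiation-after-specialization) about the explicit
// instantiation below, so that warning is suppressed around it.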
#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
#pragma GCC diagnostic pop

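// Out-of-line forwarders to the MEMORY_TOOL_* annotation macros.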
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  CHECK(memory_ != nullptr);  // Abort on OOM.
  DCHECK_ALIGNED(memory_, ArenaAllocator::kAlignment);
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

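// Return the arena's pages to the kernel. MadviseDontNeedAndZero() leaves them reading as zero,
// and clearing bytes_allocated_ makes a later Reset() a no-op.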
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

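// Zero out the used portion in place so that a reused arena hands out zero-initialized memory.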
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

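// Frees every arena on the free list. Callers must guarantee exclusive access: the destructor
// calls this directly, while LockReclaimMemory() takes lock_ first.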
void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

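// Reuse the arena at the head of the free list if it is large enough; otherwise allocate a
// fresh MallocArena or MemMapArena, depending on how the pool was configured.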
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

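// Splice the whole chain onto the front of the free list. Under a memory tool, the returned
// memory is first marked undefined so that later reads of freed arena memory are reported.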
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

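// The head arena's usage is tracked by ptr_ (its bytes_allocated_ may be stale), so count it
// as ptr_ - begin_ and walk the rest of the chain for the remaining arenas.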
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArena(rounded_bytes);
    uint8_t* noaccess_begin = ret + bytes;
    uint8_t* noaccess_end;
    if (ret == arena_head_->Begin()) {
      DCHECK(ptr_ - rounded_bytes == ret);
      noaccess_end = end_;
    } else {
      // We're still using the old arena but `ret` comes from a new one just after it.
      DCHECK(arena_head_->next_ != nullptr);
      DCHECK(ret == arena_head_->next_->Begin());
      DCHECK_EQ(rounded_bytes, arena_head_->next_->GetBytesAllocated());
      noaccess_end = arena_head_->next_->End();
    }
    MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

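// Check the currently active arena's range first, then fall back to walking the whole chain.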
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
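  // The head arena's bytes_allocated_ may be stale, so adjust the reported "lost" bytes by the
  // difference between the actual free space (end_ - ptr_) and what the arena itself reports.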
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art