arena_allocator.cc revision 3a40bf2fc1b9823e3bc8fcf96d5242668b5c088b
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "systrace.h"
#include "thread-inl.h"

namespace art {

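// Number of bytes reserved after each allocation as a red zone when running
// under a memory tool; AllocWithMemoryTool() below pads every request by this
// amount and leaves the padding inaccessible so that small overruns are
// caught by the tool.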
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "LICM         ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment makes up for the fact that the current arena
  // may not have bytes_allocated_ updated correctly yet.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

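// For illustration, a Dump() produced by the code above has roughly this
// shape (the numbers here are invented, not measured):
//
//    MEM: used: 61440, allocated: 65536, lost: 4096
//   Number of arenas allocated: 2, Number of allocations: 512, avg size: 120
//   ===== Allocation by kind
//   Misc               8192
//   SwitchTbl             0
//   ...
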
// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  CHECK(memory_ != nullptr);  // Abort on OOM.
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

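// Note the two recycling paths above: Arena::Reset() zeroes the used bytes in
// place, which is the only option for a malloc-backed arena, while
// MemMapArena::Release() drops the dirty pages via MadviseDontNeedAndZero(),
// so a pooled map-backed arena gives its memory back to the kernel until it
// is reused.
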
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

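// A minimal usage sketch for the pool API above (hypothetical caller code,
// not part of this file):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "test pool");
//   Arena* arena = pool.AllocArena(Arena::kDefaultSize);
//   // ... carve allocations out of [arena->Begin(), arena->End()) ...
//   pool.FreeArenaChain(arena);  // Returns the whole next_-linked chain.
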
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Releasing pages back to the kernel only works for the map-based implementation.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

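// FreeArenaChain() splices the returned chain onto the head of the free list
// and AllocArena() pops from the head, so the most recently freed arenas are
// reused first.
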
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

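// BytesUsed() measures the head arena with the live ptr_ - begin_ distance
// rather than its bytes_allocated_ field, since that field is only refreshed
// by UpdateBytesAllocated() below when the allocator switches arenas or is
// destroyed.
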
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

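// Sketch of the resulting layout for a single allocation under a memory tool
// (not to scale):
//
//   | bytes (defined) | red zone + padding (inaccessible) |
//   ^ ret             ^ ret + bytes                       ^ ret + rounded_bytes
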
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

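// Any unused tail of the previous arena is abandoned when a new arena is
// obtained above; that abandoned space is what the stats dump reports as
// "lost" bytes.
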
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

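// The adjustment above substitutes the live end_ - ptr_ value for the head
// arena's RemainingSpace(), compensating for its stale bytes_allocated_ so
// that the reported "lost" bytes are accurate without mutating the allocator.
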
}  // namespace art