arena_allocator.cc revision 7bda3b600d74c9a5746840ebb8534443e486615a
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"

namespace art {

static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

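// Human-readable names for each allocation kind, used by the per-kind dump
// below. The table is indexed by ArenaAllocKind; the static_assert in Dump()
// keeps its size in sync with kNumArenaAllocKinds.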
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

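// Print a summary of arena usage: bytes handed out to clients ("used"),
// bytes reserved from the system ("allocated"), and bytes wasted in unused
// arena tails ("lost"), followed by a per-kind breakdown.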
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

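// The malloc-backed arena uses calloc() so a fresh arena starts zeroed;
// Arena::Reset() below preserves this invariant by re-zeroing only the bytes
// that were actually handed out.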
MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

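// Release() returns an arena's dirty pages to the kernel without unmapping
// the reservation, so a pooled arena keeps its address range but stops
// consuming physical memory until it is reused.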
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

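// Reset() re-establishes the all-zero invariant for reuse. Only the prefix
// that was handed out is zeroed, which keeps resetting a mostly empty arena
// cheap.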
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

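// Hand out an arena of at least |size| bytes. Note that only the head of the
// free list is examined: if it is large enough it is reused, otherwise a
// brand-new arena is created even if a bigger one sits deeper in the list.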
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Releasing pages only makes sense for mem-map arenas; malloc-backed
    // memory cannot be returned to the OS piecemeal.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

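// Return a whole chain of arenas to the free list under a single lock
// acquisition. Under a memory tool, the returned bytes are first marked
// undefined so that stale reads through dangling pointers get reported.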
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

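// Bytes actually handed out to clients: the partially filled head arena is
// measured by ptr_ - begin_, and each retired arena by its recorded
// bytes_allocated_.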
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

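// Typical lifetime, as an illustrative sketch (not code from this file): an
// ArenaAllocator borrows arenas from a shared pool and returns the whole
// chain when it is destroyed.
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false);
//   {
//     ArenaAllocator allocator(&pool);
//     void* storage = allocator.Alloc(256, kArenaAllocMisc);
//     // ... use |storage|; individual frees are neither needed nor possible.
//   }  // ~ArenaAllocator() hands every arena back via FreeArenaChain().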
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

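// Slow-path allocation used when running under a memory tool (Valgrind/ASan):
// each request is padded with kMemoryToolRedZoneBytes and rounded up to
// 8 bytes, and the padding is marked no-access so overruns trip the tool.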
void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

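// Retire the current arena (recording its fill level) and link a fresh one,
// of at least kDefaultSize, onto the head of the chain. Whatever space was
// left unused in the old arena is abandoned; it shows up as "lost" bytes in
// the stats dump.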
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

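// Membership test: checks the active range first, then walks the whole arena
// chain, so the cost is linear in the number of arenas.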
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Take a snapshot of memory usage stats. The adjustment compensates for the
// head arena's bytes_allocated_ not being updated on every allocation.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art