arena_allocator.cc revision dabdc0fe183d4684f3cf4d70cb09d318cff81b42
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "systrace.h"
#include "thread-inl.h"

namespace art {

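// Number of bytes reserved as a red zone after each allocation when running
// on a memory tool; see AllocWithMemoryTool() below.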
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

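// Names for each ArenaAllocKind, padded for aligned output in Dump(); kept in
// sync with the enum via the static_assert in Dump().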
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment compensates for the current arena's
  // bytes_allocated_ possibly not being up to date.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

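// Annotation hooks for running under a memory tool such as Valgrind or
// AddressSanitizer; they forward to the corresponding MEMORY_TOOL_* macros.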
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  // calloc() zero-initializes the block; a freshly allocated arena must hand
  // out zeroed memory.
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

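// Return the arena's pages to the OS; the mapping stays valid and reads back
// as zero afterwards.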
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

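// Zero out the used portion so that a reused arena hands out zero-filled
// memory, just like a freshly allocated one.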
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

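// The pool keeps reclaimed arenas on a singly linked free list (free_arenas_)
// guarded by lock_, so finished allocators can recycle their arenas.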
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

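// Take the arena at the head of the free list if it is large enough;
// otherwise allocate a fresh one. Note that only the head is examined.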
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  // Releasing pages only works for memory-mapped arenas, not for malloc.
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

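// Splice the whole chain onto the free list with a single lock acquisition.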
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

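// Bytes handed out so far: the used part of the active arena (ptr_ - begin_)
// plus the recorded usage of every previous arena in the chain.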
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

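// Typical usage, as a sketch (Alloc() is declared in arena_allocator.h):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false);
//   ArenaAllocator allocator(&pool);
//   void* p = allocator.Alloc(16, kArenaAllocMisc);  // Returns zeroed memory.
//   // The arenas go back to the pool when the allocator is destroyed.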
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

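// Check the currently active arena's bounds first, then fall back to walking
// the whole chain.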
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Collect memory usage statistics for later dumping.
MemStats ArenaAllocator::GetMemStats() const {
  // (end_ - ptr_) is the actual remaining space in the current arena; subtract
  // the arena's possibly stale view so that Dump() reports "lost" correctly.
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art