arena_allocator.cc revision 93205e395f777c1dd81d3f164cf9a4aec4bde45f
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include "systrace.h"

namespace art {

static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

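// One human-readable label per ArenaAllocKind, used by Dump() below; the
// static_assert in Dump() keeps this table in sync with the enum.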
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "LICM         ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

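// Record a single allocation: add the byte count to the per-kind bucket and
// bump the total allocation counter.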
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

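// Print a summary of memory usage (used vs. allocated vs. lost bytes, arena and
// allocation counts), followed by a per-kind breakdown of allocated bytes.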
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

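// Give a used arena's pages back to the kernel; MadviseDontNeedAndZero() both
// drops the resident memory and leaves the range reading back as zero.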
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

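// Zero out the used portion so the arena can be handed out again with memory
// that is guaranteed to start zeroed.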
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

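// Hand out an arena of at least `size` bytes: reuse the head of the free list
// when it is large enough, otherwise allocate a fresh MallocArena or
// MemMapArena. The arena is Reset() so its memory starts zeroed.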
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

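// Release the pages of every arena sitting in the free list back to the
// kernel. Only meaningful for the MemMap-backed implementation, so it is
// skipped when use_malloc_ is set.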
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

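// Return a whole chain of arenas to the free list. Under a memory tool, each
// arena's used bytes are first marked undefined so later reads of stale data
// are reported.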
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

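// Bytes handed out to clients: the bump-pointer offset in the current arena
// plus the recorded usage of every previous arena in the chain.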
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

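// Record the current arena's usage, then fetch a new arena from the pool (at
// least allocation_size bytes, never smaller than Arena::kDefaultSize) and
// make it the head of the chain and the target of bump-pointer allocation.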
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

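// Whether `ptr` was allocated from this allocator: check the current
// bump-pointer region first, then every arena in the chain.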
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
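  // Correct for the head arena's stale bytes_allocated_: (end_ - ptr_) is its
  // true remaining space, while RemainingSpace() still reflects the value set
  // by the last UpdateBytesAllocated().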
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art