arena_allocator.cc revision 225b6464a58ebe11c156144653f11a1c6607f4eb
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include "base/memory_tool.h"

namespace art {

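// Number of bytes of red zone placed after each allocation when running on a
// memory tool (Valgrind/ASan); see ArenaAllocator::AllocValgrind() below.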
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

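// Human-readable names for each ArenaAllocKind, used by Dump(). The order and
// number of entries must match the ArenaAllocKind enum; Dump() checks the
// count with a static_assert.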
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

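// Dumps a summary like the following (values illustrative only):
//
//    MEM: used: 61536, allocated: 131072, lost: 69536
//   Number of arenas allocated: 2, Number of allocations: 512, avg size: 120
//   ===== Allocation by kind
//   Misc               4096
//   ...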
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

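// Reset() zeroes out the used bytes in place, so a recycled arena hands out
// zeroed memory again; contrast with MemMapArena::Release() above, which
// returns the pages to the kernel instead (used by ArenaPool::TrimMaps()).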
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

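// Illustrative usage sketch of the pool API below (hypothetical caller, not
// part of this file; real clients are ArenaAllocator instances):
//
//   ArenaPool pool(/* use_malloc */ true, /* low_4gb */ false);
//   Arena* arena = pool.AllocArena(Arena::kDefaultSize);
//   ... carve allocations out of [arena->Begin(), arena->End()) ...
//   pool.FreeArenaChain(arena);  // Returns the arena, and anything chained
//                                // through its next_ field, for reuse.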
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Releasing pages only makes sense for the map-backed implementation;
    // malloc-backed arenas are left alone.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

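// Returned chains are spliced onto the front of free_arenas_, and
// AllocArena() pops from the front, so the most recently freed arenas are
// reused first. Under a memory tool, the used bytes are first marked
// undefined so that reads of stale contents from a recycled arena get
// reported.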
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

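// Note: the head arena's bytes_allocated_ may be stale until
// UpdateBytesAllocated() runs, so its usage is computed from ptr_/begin_ and
// only the older arenas' recorded counts are trusted.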
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

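// Allocation path used when running on a memory tool. Each request is padded
// with a red zone that is marked inaccessible, so overruns past the requested
// size are reported. Layout of one allocation (sketch):
//
//   ret               ret + bytes                      ret + rounded_bytes
//    |  caller-usable  |  red zone + padding (NOACCESS)  |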
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

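// Pushes a fresh arena onto the head of the chain and points the bump pointer
// at it. Any space still remaining in the previous head arena is abandoned;
// Dump() reports such space as "lost" bytes.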
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
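// The adjustment compensates for the head arena's possibly stale
// bytes_allocated_: (end_ - ptr_) is its true remaining space, while
// RemainingSpace() reflects only the recorded value.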
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art