arena_allocator.cc revision 91e11c0c840193c6822e66846020b6647de243d5
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <algorithm>
18#include <iomanip>
19#include <numeric>
20
21#include "arena_allocator.h"
22#include "logging.h"
23#include "mem_map.h"
24#include "mutex.h"
25#include "thread-inl.h"
26#include "base/memory_tool.h"
27
28namespace art {
29
// Number of red-zone bytes appended to each allocation when running under a
// memory tool, so accesses just past the requested size are detectable
// (see AllocValgrind below).
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-line definition for the odr-used in-class constant (required pre-C++17).
constexpr size_t Arena::kDefaultSize;
32
// Human-readable label for each ArenaAllocKind, printed by Dump(). Entries are
// index-aligned with the ArenaAllocKind enum (enforced by the static_assert in
// Dump()) and right-padded so the per-kind byte counts line up in the output.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "Graph        ",
  "BasicBlock   ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "LoopInfo     ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
};
74
75template <bool kCount>
76ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
77    : num_allocations_(0u) {
78  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
79}
80
81template <bool kCount>
82void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
83  num_allocations_ = other.num_allocations_;
84  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
85}
86
87template <bool kCount>
88void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
89  alloc_stats_[kind] += bytes;
90  ++num_allocations_;
91}
92
// Returns the total number of allocations recorded via RecordAlloc().
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
97
98template <bool kCount>
99size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
100  const size_t init = 0u;  // Initial value of the correct type.
101  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
102}
103
104template <bool kCount>
105void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
106                                           ssize_t lost_bytes_adjustment) const {
107  size_t malloc_bytes = 0u;
108  size_t lost_bytes = 0u;
109  size_t num_arenas = 0u;
110  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
111    malloc_bytes += arena->Size();
112    lost_bytes += arena->RemainingSpace();
113    ++num_arenas;
114  }
115  // The lost_bytes_adjustment is used to make up for the fact that the current arena
116  // may not have the bytes_allocated_ updated correctly.
117  lost_bytes += lost_bytes_adjustment;
118  const size_t bytes_allocated = BytesAllocated();
119  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
120     << ", lost: " << lost_bytes << "\n";
121  size_t num_allocations = NumAllocations();
122  if (num_allocations != 0) {
123    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
124       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
125  }
126  os << "===== Allocation by kind\n";
127  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
128  for (int i = 0; i < kNumArenaAllocKinds; i++) {
129      os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
130  }
131}
132
// Explicitly instantiate the used implementation so the out-of-line member
// definitions above are emitted in this translation unit.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
135
136Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
137}
138
139MallocArena::MallocArena(size_t size) {
140  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
141  size_ = size;
142}
143
144MallocArena::~MallocArena() {
145  free(reinterpret_cast<void*>(memory_));
146}
147
148MemMapArena::MemMapArena(size_t size, bool low_4gb) {
149  std::string error_msg;
150  map_.reset(MemMap::MapAnonymous(
151      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
152  CHECK(map_.get() != nullptr) << error_msg;
153  memory_ = map_->Begin();
154  size_ = map_->Size();
155}
156
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>, which unmaps the backing pages.
}
160
161void MemMapArena::Release() {
162  if (bytes_allocated_ > 0) {
163    map_->MadviseDontNeedAndZero();
164    bytes_allocated_ = 0;
165  }
166}
167
168void Arena::Reset() {
169  if (bytes_allocated_ > 0) {
170    memset(Begin(), 0, bytes_allocated_);
171    bytes_allocated_ = 0;
172  }
173}
174
175ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
176    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
177      low_4gb_(low_4gb) {
178  if (low_4gb) {
179    CHECK(!use_malloc) << "low4gb must use map implementation";
180  }
181  if (!use_malloc) {
182    MemMap::Init();
183  }
184}
185
186ArenaPool::~ArenaPool() {
187  while (free_arenas_ != nullptr) {
188    auto* arena = free_arenas_;
189    free_arenas_ = free_arenas_->next_;
190    delete arena;
191  }
192}
193
194Arena* ArenaPool::AllocArena(size_t size) {
195  Thread* self = Thread::Current();
196  Arena* ret = nullptr;
197  {
198    MutexLock lock(self, lock_);
199    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
200      ret = free_arenas_;
201      free_arenas_ = free_arenas_->next_;
202    }
203  }
204  if (ret == nullptr) {
205    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
206        new MemMapArena(size, low_4gb_);
207  }
208  ret->Reset();
209  return ret;
210}
211
212void ArenaPool::TrimMaps() {
213  if (!use_malloc_) {
214    // Doesn't work for malloc.
215    MutexLock lock(Thread::Current(), lock_);
216    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
217      arena->Release();
218    }
219  }
220}
221
222size_t ArenaPool::GetBytesAllocated() const {
223  size_t total = 0;
224  MutexLock lock(Thread::Current(), lock_);
225  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
226    total += arena->GetBytesAllocated();
227  }
228  return total;
229}
230
231void ArenaPool::FreeArenaChain(Arena* first) {
232  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
233    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
234      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
235    }
236  }
237  if (first != nullptr) {
238    Arena* last = first;
239    while (last->next_ != nullptr) {
240      last = last->next_;
241    }
242    Thread* self = Thread::Current();
243    MutexLock lock(self, lock_);
244    last->next_ = free_arenas_;
245    free_arenas_ = first;
246  }
247}
248
// Total bytes recorded by the per-kind allocation statistics.
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
252
253size_t ArenaAllocator::BytesUsed() const {
254  size_t total = ptr_ - begin_;
255  if (arena_head_ != nullptr) {
256    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
257         cur_arena = cur_arena->next_) {
258     total += cur_arena->GetBytesAllocated();
259    }
260  }
261  return total;
262}
263
// Creates an allocator that draws arenas from `pool`. No arena is acquired
// until the first allocation forces ObtainNewArenaForAllocation().
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}
272
273void ArenaAllocator::UpdateBytesAllocated() {
274  if (arena_head_ != nullptr) {
275    // Update how many bytes we have allocated into the arena so that the arena pool knows how
276    // much memory to zero out.
277    arena_head_->bytes_allocated_ = ptr_ - begin_;
278  }
279}
280
// Allocation path used when running under a memory tool (Valgrind/ASan).
// Pads each request with a red zone so the tool can flag accesses just past
// the user-visible size. Returns nullptr only if a new arena cannot be
// obtained.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Reserve red-zone bytes after the request and round up to 8-byte alignment.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  // Stats record the rounded size, matching what the arena actually consumed.
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Make the red zone (and alignment slack) inaccessible so the tool reports
  // any read or write past the `bytes` the caller asked for.
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
300
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the thread pool.
  // Flush the head arena's usage first so the pool zeroes/poisons the right
  // number of bytes when recycling it.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
306
307void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
308  UpdateBytesAllocated();
309  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
310  new_arena->next_ = arena_head_;
311  arena_head_ = new_arena;
312  // Update our internal data structures.
313  ptr_ = begin_ = new_arena->Begin();
314  end_ = new_arena->End();
315}
316
317bool ArenaAllocator::Contains(const void* ptr) const {
318  if (ptr >= begin_ && ptr < end_) {
319    return true;
320  }
321  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
322    if (cur_arena->Contains(ptr)) {
323      return true;
324    }
325  }
326  return false;
327}
328
// Snapshot object used for dumping allocator statistics. Holds non-owning
// pointers to the stats and arena chain; callers must keep both alive while
// this MemStats is in use.
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
336
// Prints a named header then delegates to the stats object's Dump().
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
341
342// Dump memory usage stats.
343MemStats ArenaAllocator::GetMemStats() const {
344  ssize_t lost_bytes_adjustment =
345      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
346  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
347}
348
349}  // namespace art
350