arena_allocator.cc revision 2a408a3bef330551818f9cec9a7c5aa7a3f1129e
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"

namespace art {

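// Number of padding bytes appended to each allocation as a red zone when
// running under a memory tool; the padding is poisoned in
// ArenaAllocator::AllocWithMemoryTool() so overruns are reported.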
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

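// Tally one allocation: add the byte count to the per-kind bucket and bump
// the total allocation counter.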
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

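// Dump aggregate statistics: walk the arena chain to total the allocated and
// unused ("lost") bytes, then print the per-kind allocation totals.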
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

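// calloc() returns zero-initialized memory, so a fresh arena satisfies the
// allocator's zeroed-memory invariant without an extra memset.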
MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

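// Back the arena with an anonymous memory mapping, optionally restricted to
// the low 4GiB of the address space.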
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

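// Return dirty pages to the OS and let the kernel re-zero them lazily,
// instead of memset()ing in place as Arena::Reset() does.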
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

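// Reuse the arena at the head of the free list if it is large enough;
// otherwise allocate a fresh arena of the requested size. Only the head of
// the list is considered, not the best fit.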
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

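// Splice an allocator's whole arena chain back onto the pool's free list.
// Under a memory tool, the freed ranges are first marked undefined so that
// stale reads through dangling pointers are reported.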
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

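// Total bytes handed out so far. The head arena's bytes_allocated_ is only
// synced in UpdateBytesAllocated(), so compute its usage from the bump
// pointer; the rest of the chain has accurate counts.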
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

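// Allocation path used when running under a memory tool: pad each request
// with a red zone, then mark only the requested bytes as defined and poison
// the padding so that buffer overruns are caught.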
void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

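// Grab an arena from the pool that is large enough for the pending
// allocation and make it the new head of this allocator's chain.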
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

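// Fast path: check the currently active [begin_, end_) range before scanning
// the whole arena chain.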
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
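// The adjustment accounts for the head arena's stale bytes_allocated_: its
// true remaining space is end_ - ptr_, not what RemainingSpace() reports.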
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art