arena_allocator.cc revision 32ce2adefb8a3d0eda59a29f5e87c1eb43eef796
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "systrace.h"
#include "thread-inl.h"

namespace art {

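// Number of bytes reserved as a red zone after each allocation when running under a memory
// tool; AllocWithMemoryTool() below leaves this padding marked inaccessible so that small
// buffer overruns are detected.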
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

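// Printable names for the ArenaAllocKind values, used by Dump() below. The order must match
// the ArenaAllocKind enumeration; the static_assert in Dump() checks the count.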
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

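// Dumps a summary of memory usage: total bytes used, allocated and lost, the number of arenas
// and allocations, and a per-kind breakdown of allocated bytes.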
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

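// Out-of-line hooks that mark memory as defined, undefined or inaccessible for an attached
// memory tool (e.g. Valgrind or AddressSanitizer).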
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

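// calloc() is used rather than malloc() because arena memory must read as zero: Arena::Reset()
// and the DCHECK in AllocWithMemoryTool() both rely on fresh arena space being zero-filled.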
MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

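// Returns the arena's dirty pages to the kernel. The virtual mapping stays in place and reads
// as zero afterwards, so pooled arenas do not hold on to physical memory.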
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

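// Zeroes only the portion of the arena that was actually handed out, restoring the invariant
// that unused arena memory is zero-filled.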
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

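// Frees every arena on the free list. Called without the lock from the destructor, when no
// other thread can be using the pool; LockReclaimMemory() is the locking variant.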
void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

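// Hands out a recycled arena if the head of the free list is large enough, otherwise allocates
// a new one. Only the list head is examined, so an oversized request can trigger a fresh
// allocation even when a suitable arena sits deeper in the list.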
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

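// Releases the physical pages of every pooled (free) arena back to the OS. Only meaningful
// for MemMap-backed arenas, hence the use_malloc_ guard.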
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

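// Returns a whole chain of arenas to the pool, splicing it onto the head of the free list with
// a single lock acquisition. Under a memory tool the contents are first marked undefined so
// that later reads of recycled memory are reported.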
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

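// Number of bytes actually handed out: the fill level (ptr_ - begin_) of the arena currently
// being bump-allocated from, plus the recorded bytes_allocated_ of every older arena in the
// chain.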
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

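// A minimal usage sketch (illustrative only; Alloc() and the ArenaAllocKind constants are
// declared in arena_allocator.h):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "example pool");
//   ArenaAllocator allocator(&pool);
//   void* buffer = allocator.Alloc(64, kArenaAllocMisc);  // Bump-pointer allocation.
//   // The arena chain is returned to `pool` when `allocator` goes out of scope.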
void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

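// Installs a fresh arena at the head of the chain and points the bump-pointer range at it.
// The outgoing arena's fill level must be recorded first, hence UpdateBytesAllocated().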
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

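// Returns true if ptr points into memory owned by this allocator. The currently active range
// is checked first as a fast path, then every arena in the chain.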
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Collect memory usage statistics for later dumping via MemStats::Dump().
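// The head arena's bytes_allocated_ is updated lazily (see UpdateBytesAllocated()), so its
// RemainingSpace() may be stale; (end_ - ptr_) is its true remaining space. The difference is
// passed along so that Dump() reports an accurate number of lost bytes.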
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art