/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include "systrace.h"

namespace art {

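// Number of bytes reserved as a red zone after each allocation when running
// under a memory tool; see AllocWithMemoryTool() below.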
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "LICM         ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

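// Prints overall arena usage (used / allocated / lost bytes and arena count)
// followed by a per-ArenaAllocKind breakdown of allocated bytes.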
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  CHECK(memory_ != nullptr);  // Abort on OOM.
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

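// Returns the arena's pages to the OS without unmapping them, so the arena can
// be reused later with zeroed memory.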
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

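// Zeroes out the used part of the arena so that a reused arena again hands out
// zero-initialized memory.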
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

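// Hands out an arena of at least `size` bytes: reuses the first free arena if it
// is large enough, otherwise allocates a fresh MallocArena or MemMapArena.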
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

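// Releases the pages of all free mem-mapped arenas back to the OS; malloc-backed
// arenas cannot be trimmed this way.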
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

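// Sums the bytes recorded as allocated in the arenas currently on the free list.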
size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

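// Returns a whole chain of arenas to the pool's free list. Under a memory tool,
// the used part of each arena is first marked undefined to catch later reads of
// stale data.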
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

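// Bytes actually handed out: the used part of the current (head) arena plus the
// recorded usage of every older arena in the chain.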
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

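// An ArenaAllocator starts without any arena; the first allocation pulls one
// from the pool via AllocFromNewArena(). Illustrative usage sketch (the Alloc()
// call and kArenaAllocMisc below assume the interface declared in
// arena_allocator.h):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "example pool");
//   ArenaAllocator allocator(&pool);
//   void* data = allocator.Alloc(256, kArenaAllocMisc);  // Zero-initialized.
//   // All arenas go back to `pool` when `allocator` is destroyed.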
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArena(rounded_bytes);
    uint8_t* noaccess_begin = ret + bytes;
    uint8_t* noaccess_end;
    if (ret == arena_head_->Begin()) {
      DCHECK(ptr_ - rounded_bytes == ret);
      noaccess_end = end_;
    } else {
      // We're still using the old arena but `ret` comes from a new one just after it.
      DCHECK(arena_head_->next_ != nullptr);
      DCHECK(ret == arena_head_->next_->Begin());
      DCHECK_EQ(rounded_bytes, arena_head_->next_->GetBytesAllocated());
      noaccess_end = arena_head_->next_->End();
    }
    MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

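// Gets a new arena from the pool that can hold at least `bytes`. If the current
// arena still has more free space left than the new one would after this
// allocation, the new arena is linked in behind the head and the current arena
// stays active; otherwise the new arena becomes the head.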
uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

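// Returns true if `ptr` lies within the currently active allocation range or in
// any arena owned by this allocator.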
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
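  // The head arena's bytes_allocated_ may be stale, so adjust the "lost" bytes by
  // the difference between the true remaining space (end_ - ptr_) and the space
  // the arena itself currently reports.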
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art