/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scoped_arena_allocator.h"

#include "arena_allocator-inl.h"
#include "base/memory_tool.h"

namespace art {

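// Bytes reserved after each allocation as a "red zone" when running under a
// memory tool. Red zones stay marked inaccessible (see AllocWithMemoryTool()
// below), so out-of-bounds accesses just past an allocation are reported.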
static constexpr size_t kMemoryToolRedZoneBytes = 8;

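// A minimal usage sketch of the scoped allocation pattern implemented below.
// Illustrative only; it assumes a concrete ArenaPool implementation (e.g. a
// MallocArenaPool) is available to back the ArenaStack:
//
//   ArenaPool* pool = ...;  // Assumed: a concrete pool owned by the caller.
//   ArenaStack arena_stack(pool);
//   {
//     ScopedArenaAllocator allocator(&arena_stack);
//     void* data = allocator.Alloc(64u, kArenaAllocMisc);
//     // `data` is valid only while `allocator` is in scope; the destructor
//     // rewinds the arena stack to the mark taken at construction.
//   }
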
ArenaStack::ArenaStack(ArenaPool* arena_pool)
  : DebugStackRefCounter(),
    stats_and_pool_(arena_pool),
    bottom_arena_(nullptr),
    top_arena_(nullptr),
    top_ptr_(nullptr),
    top_end_(nullptr) {
}

ArenaStack::~ArenaStack() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
}

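// Frees the whole arena chain back to the pool and returns the stack to its
// freshly-constructed state.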
void ArenaStack::Reset() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
  bottom_arena_ = nullptr;
  top_arena_ = nullptr;
  top_ptr_ = nullptr;
  top_end_ = nullptr;
}

MemStats ArenaStack::GetPeakStats() const {
  DebugStackRefCounter::CheckNoRefs();
  return MemStats("ArenaStack peak", PeakStats(), bottom_arena_);
}

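// Called when the current arena cannot fit `rounded_bytes` more bytes.
// Advances to the next arena in the chain if it is large enough; otherwise
// allocates a fresh arena from the pool and links it in ahead of the rest of
// the chain, keeping already-allocated arenas available for later reuse.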
uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
  UpdateBytesAllocated();
  size_t allocation_size = std::max(arena_allocator::kArenaDefaultSize, rounded_bytes);
  if (UNLIKELY(top_arena_ == nullptr)) {
    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_->next_ = nullptr;
  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
    top_arena_ = top_arena_->next_;
  } else {
    Arena* tail = top_arena_->next_;
    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_ = top_arena_->next_;
    top_arena_->next_ = tail;
  }
  top_end_ = top_arena_->End();
  // top_ptr_ shall be updated by ScopedArenaAllocator.
  return top_arena_->Begin();
}

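// Merges the current allocation counters into the recorded peak, then
// restores the counters that were saved when the ending scope began.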
void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
    PeakStats()->Copy(*CurrentStats());
  }
  CurrentStats()->Copy(restore_stats);
}

void ArenaStack::UpdateBytesAllocated() {
  if (top_arena_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
    if (top_arena_->bytes_allocated_ < allocated) {
      top_arena_->bytes_allocated_ = allocated;
    }
  }
}

void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as addressable (though still
  // undefined). That leaves red zones and padding between allocations marked
  // as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  uint8_t* ptr = top_ptr_;
  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
    ptr = AllocateFromNextArena(rounded_bytes);
    CHECK(ptr != nullptr) << "Failed to allocate memory";
    MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
  }
  CurrentStats()->RecordAlloc(bytes, kind);
  top_ptr_ = ptr + rounded_bytes;
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
  return ptr;
}

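// Move constructor: takes over the other allocator's marks and stats. The
// moved-from allocator must have no outstanding references and is detached,
// so its destructor will not rewind the arena stack.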
ScopedArenaAllocator::ScopedArenaAllocator(ScopedArenaAllocator&& other)
    : DebugStackReference(std::move(other)),
      DebugStackRefCounter(),
      ArenaAllocatorStats(other),
      arena_stack_(other.arena_stack_),
      mark_arena_(other.mark_arena_),
      mark_ptr_(other.mark_ptr_),
      mark_end_(other.mark_end_) {
  other.DebugStackRefCounter::CheckNoRefs();
  other.arena_stack_ = nullptr;
}

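// Records the current top of the arena stack as a mark; destroying (or
// resetting) this allocator rewinds the stack to that mark, releasing every
// allocation made within this scope.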
ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
    : DebugStackReference(arena_stack),
      DebugStackRefCounter(),
      ArenaAllocatorStats(*arena_stack->CurrentStats()),
      arena_stack_(arena_stack),
      mark_arena_(arena_stack->top_arena_),
      mark_ptr_(arena_stack->top_ptr_),
      mark_end_(arena_stack->top_end_) {
}

ScopedArenaAllocator::~ScopedArenaAllocator() {
  if (arena_stack_ != nullptr) {
    DoReset();
  }
}

void ScopedArenaAllocator::Reset() {
  DoReset();
  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
  }
}

void ScopedArenaAllocator::DoReset() {
  DebugStackReference::CheckTop();
  DebugStackRefCounter::CheckNoRefs();
  arena_stack_->UpdatePeakStatsAndRestore(*this);
  arena_stack_->UpdateBytesAllocated();
  if (LIKELY(mark_arena_ != nullptr)) {
    arena_stack_->top_arena_ = mark_arena_;
    arena_stack_->top_ptr_ = mark_ptr_;
    arena_stack_->top_end_ = mark_end_;
  } else if (arena_stack_->bottom_arena_ != nullptr) {
    // This allocator was constructed before the first arena was allocated.
    // Rewind to the start of the bottom arena and record it as the mark.
    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
  }
}

}  // namespace art