// scoped_arena_allocator.h revision 07206af370746e6d7cf528e655b4854e7a865cfa
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
#define ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_

#include "base/logging.h"
#include "base/macros.h"
#include "utils/arena_allocator.h"
#include "utils/debug_stack.h"
#include "globals.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Holds a list of Arenas for use by ScopedArenaAllocator stack.
class ArenaStack : private DebugStackRefCounter {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  // Reset the stack for reuse (defined out of line).
  void Reset();

  // Peak number of bytes allocated over this stack's lifetime so far.
  size_t PeakBytesAllocated() {
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

 private:
  // Tag types used only to distinguish the two ArenaAllocatorStats bases of
  // StatsAndPool below (peak vs. current stats).
  struct Peak;
  struct Current;
  // Thin wrapper deriving from ArenaAllocatorStats; the Tag parameter makes
  // each instantiation a distinct base class so both can coexist.
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  // Stats for the high-water mark; selected via the TaggedStats<Peak> base.
  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  // Stats for allocations currently live on the stack.
  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(running_on_valgrind_)) {
      // Under valgrind take the out-of-line path.
      return AllocValgrind(bytes, kind);
    }
    // Round the request up to an 8-byte boundary to keep allocations aligned.
    size_t rounded_bytes = RoundUp(bytes, 8);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      // Not enough room left in the top arena; grab space from the next one.
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    // Record the original (unrounded) size against the current stats.
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;  // First arena in the chain.
  Arena* top_arena_;     // Arena currently being allocated from.
  uint8_t* top_ptr_;     // Next free byte in the top arena.
  uint8_t* top_end_;     // End of the top arena's usable space.

  const bool running_on_valgrind_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};

class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
  // the allocator is not exactly a C++ block scope. For example, an optimization
  // pass can create the scoped allocator in Start() and destroy it in End().
  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
    // Rebase the mark to the allocator's own storage so that the allocator
    // object itself is presumably reclaimed along with its allocations when
    // the mark is restored — confirm against Reset()/dtor in the .cc file.
    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
    return allocator;
  }

  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  // Reset this allocator; NOTE(review): presumably rewinds the ArenaStack to
  // the recorded mark — verify in the .cc file.
  void Reset();

  // Allocate from the underlying ArenaStack; valid only while this allocator
  // is the top of the debug stack (checked in debug builds).
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  // ScopedArenaAllocatorAdapter is incomplete here, we need to define this later.
  ScopedArenaAllocatorAdapter<void> Adapter();

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr) { UNUSED(ptr); }

 private:
  ArenaStack* const arena_stack_;
  // Stack position (arena, pointer, end) recorded for later restoration.
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};

// Specialization for void, as required of standard allocators: supplies only
// the typedefs and rebind machinery (no allocate/deallocate for void).
template <>
class ScopedArenaAllocatorAdapter<void>
    : private DebugStackReference, private DebugStackIndirectTopRef {
 public:
  typedef void value_type;
  typedef void* pointer;
  typedef const void* const_pointer;

  // Rebinding yields the adapter instantiated for the requested element type.
  template <typename U>
  struct rebind {
    typedef ScopedArenaAllocatorAdapter<U> other;
  };

  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator)
      : DebugStackReference(arena_allocator),
        DebugStackIndirectTopRef(arena_allocator),
        arena_stack_(arena_allocator->arena_stack_) {
  }
  // Converting constructor used when containers rebind the allocator.
  template <typename U>
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
      : DebugStackReference(other),
        DebugStackIndirectTopRef(other),
        arena_stack_(other.arena_stack_) {
  }
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
  ~ScopedArenaAllocatorAdapter() = default;

 private:
  ArenaStack* arena_stack_;

  template <typename U>
  friend class ScopedArenaAllocatorAdapter;
};

176// Adapter for use of ScopedArenaAllocator in STL containers.
177template <typename T>
178class ScopedArenaAllocatorAdapter : private DebugStackReference, private DebugStackIndirectTopRef {
179 public:
180  typedef T value_type;
181  typedef T* pointer;
182  typedef T& reference;
183  typedef const T* const_pointer;
184  typedef const T& const_reference;
185  typedef size_t size_type;
186  typedef ptrdiff_t difference_type;
187
188  template <typename U>
189  struct rebind {
190    typedef ScopedArenaAllocatorAdapter<U> other;
191  };
192
193  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator)
194      : DebugStackReference(arena_allocator),
195        DebugStackIndirectTopRef(arena_allocator),
196        arena_stack_(arena_allocator->arena_stack_) {
197  }
198  template <typename U>
199  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
200      : DebugStackReference(other),
201        DebugStackIndirectTopRef(other),
202        arena_stack_(other.arena_stack_) {
203  }
204  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
205  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
206  ~ScopedArenaAllocatorAdapter() = default;
207
208  size_type max_size() const {
209    return static_cast<size_type>(-1) / sizeof(T);
210  }
211
212  pointer address(reference x) const { return &x; }
213  const_pointer address(const_reference x) const { return &x; }
214
215  pointer allocate(size_type n, ScopedArenaAllocatorAdapter<void>::pointer hint = nullptr) {
216    DCHECK_LE(n, max_size());
217    DebugStackIndirectTopRef::CheckTop();
218    return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T), kArenaAllocSTL));
219  }
220  void deallocate(pointer p, size_type n) {
221    DebugStackIndirectTopRef::CheckTop();
222  }
223
224  void construct(pointer p, const_reference val) {
225    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
226    new (static_cast<void*>(p)) value_type(val);
227  }
228  void destroy(pointer p) {
229    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
230    p->~value_type();
231  }
232
233 private:
234  ArenaStack* arena_stack_;
235
236  template <typename U>
237  friend class ScopedArenaAllocatorAdapter;
238
239  template <typename U>
240  friend bool operator==(const ScopedArenaAllocatorAdapter<U>& lhs,
241                         const ScopedArenaAllocatorAdapter<U>& rhs);
242};
243
244template <typename T>
245inline bool operator==(const ScopedArenaAllocatorAdapter<T>& lhs,
246                       const ScopedArenaAllocatorAdapter<T>& rhs) {
247  return lhs.arena_stack_ == rhs.arena_stack_;
248}
249
// Inequality is defined via operator==, so the two can never disagree.
template <typename T>
inline bool operator!=(const ScopedArenaAllocatorAdapter<T>& lhs,
                       const ScopedArenaAllocatorAdapter<T>& rhs) {
  return !(lhs == rhs);
}

256inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter() {
257  return ScopedArenaAllocatorAdapter<void>(this);
258}

}  // namespace art

#endif  // ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_