// Copyright 2000 The RE2 Authors.  All Rights Reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

#include "util/util.h"

namespace re2 {

// ----------------------------------------------------------------------
// UnsafeArena::UnsafeArena()
// UnsafeArena::~UnsafeArena()
//    Destroying the arena frees every block it allocated, including
//    the initial block reserved by the constructor.
// ----------------------------------------------------------------------
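//
// A quick usage sketch for context, assuming GetMemory(size, align) is
// the public entry point declared in the accompanying header (it falls
// back to GetMemoryFallback() below when the fast path cannot satisfy
// a request):
//
//   UnsafeArena arena(4096);             // reserves one 4096-byte block
//   void* p = arena.GetMemory(128, 8);   // carved out of that block
//   arena.Reset();                       // reclaims everything at once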


UnsafeArena::UnsafeArena(const size_t block_size)
  : block_size_(block_size),
    freestart_(NULL),                   // set for real in Reset()
    last_alloc_(NULL),
    remaining_(0),
    blocks_alloced_(1),
    overflow_blocks_(NULL) {
  assert(block_size > kDefaultAlignment);

  first_blocks_[0].mem = reinterpret_cast<char*>(malloc(block_size_));
  first_blocks_[0].size = block_size_;

  Reset();
}

UnsafeArena::~UnsafeArena() {
  FreeBlocks();
  assert(overflow_blocks_ == NULL);    // FreeBlocks() should do that
  // FreeBlocks() deliberately keeps the first block allocated for
  // reuse; now that the arena is going away, free it too.
  for (int i = 0; i < blocks_alloced_; i++)
    free(first_blocks_[i].mem);
}

// ----------------------------------------------------------------------
// UnsafeArena::Reset()
//    Clears all the memory an arena is using.
// ----------------------------------------------------------------------

void UnsafeArena::Reset() {
  FreeBlocks();
  freestart_ = first_blocks_[0].mem;
  remaining_ = first_blocks_[0].size;
  last_alloc_ = NULL;

  // We do not know for sure whether or not the first block is aligned,
  // so we fix that right now.
  const int overage = reinterpret_cast<uintptr_t>(freestart_) &
                      (kDefaultAlignment-1);
  if (overage > 0) {
    const int waste = kDefaultAlignment - overage;
    freestart_ += waste;
    remaining_ -= waste;
  }
  freestart_when_empty_ = freestart_;
  assert(!(reinterpret_cast<uintptr_t>(freestart_)&(kDefaultAlignment-1)));
}

// -------------------------------------------------------------
// UnsafeArena::AllocNewBlock()
//    Adds and returns an AllocatedBlock.
//    The returned AllocatedBlock* is valid only until the next call
//    to AllocNewBlock() or Reset(), since either one may reallocate
//    or delete overflow_blocks_ and thereby move or destroy the block.
// -------------------------------------------------------------

UnsafeArena::AllocatedBlock* UnsafeArena::AllocNewBlock(
    const size_t block_size) {
  AllocatedBlock *block;
  // Find the next block.
  if ( blocks_alloced_ < arraysize(first_blocks_) ) {
    // Use one of the pre-allocated blocks
    block = &first_blocks_[blocks_alloced_++];
  } else {                   // oops, out of space, move to the vector
    if (overflow_blocks_ == NULL) overflow_blocks_ = new vector<AllocatedBlock>;
    // Adds another block to the vector.
    overflow_blocks_->resize(overflow_blocks_->size()+1);
    // block points to the last block of the vector.
    block = &overflow_blocks_->back();
  }

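  // block->mem comes straight from malloc(), which is assumed to return
  // memory aligned to at least kDefaultAlignment; GetMemoryFallback()
  // relies on that when it hands out block->mem without any further
  // alignment fix-up.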
  block->mem = reinterpret_cast<char*>(malloc(block_size));
  block->size = block_size;

  return block;
}

// ----------------------------------------------------------------------
// UnsafeArena::GetMemoryFallback()
//    We take memory out of our pool, aligned on the byte boundary
//    requested.  If we don't have space in our current pool, we
//    allocate a new block (wasting the remaining space in the
//    current block) and give you that.  If your memory needs are
//    too big for a single block, we make a special your-memory-only
//    allocation -- this is equivalent to not using the arena at all,
//    except that the block is still freed when the arena is reset
//    or destroyed.
// ----------------------------------------------------------------------

void* UnsafeArena::GetMemoryFallback(const size_t size, const int align) {
  if (size == 0)
    return NULL;             // stl/stl_alloc.h says this is okay

  assert(align > 0 && 0 == (align & (align - 1)));  // must be power of 2

  // If the object is more than a quarter of the block size, allocate
  // it separately to avoid wasting too much space in leftover bytes
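  // (For example, with a 4096-byte block size, any single request
  // larger than 1024 bytes takes this path.)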
  if (block_size_ == 0 || size > block_size_/4) {
    // then it gets its own block in the arena
    assert(align <= kDefaultAlignment);   // that's all malloc() guarantees
    // This block stays separate from the rest of the world; in particular
    // we don't update last_alloc_ so you can't reclaim space on this block.
    return AllocNewBlock(size)->mem;
  }

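  // A worked example of the fix-up below: if freestart_ is 0x1003 and
  // align is 8, then overage = 0x1003 & 7 = 3, waste = 8 - 3 = 5, and
  // freestart_ advances to 0x1008, which is 8-byte aligned.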
  const int overage =
    (reinterpret_cast<uintptr_t>(freestart_) & (align-1));
  if (overage) {
    const int waste = align - overage;
    freestart_ += waste;
    if (waste < remaining_) {
      remaining_ -= waste;
    } else {
      remaining_ = 0;
    }
  }
  if (size > remaining_) {
    AllocatedBlock *block = AllocNewBlock(block_size_);
    freestart_ = block->mem;
    remaining_ = block->size;
  }
  remaining_ -= size;
  last_alloc_ = freestart_;
  freestart_ += size;
  assert((reinterpret_cast<uintptr_t>(last_alloc_) & (align-1)) == 0);
  return reinterpret_cast<void*>(last_alloc_);
}

// ----------------------------------------------------------------------
// UnsafeArena::FreeBlocks()
//    The arena never frees individual allocations; memory handed out
//    is reclaimed only when Reset() is called or the arena is
//    destroyed.  FreeBlocks() does the work for Reset(), actually
//    freeing all memory allocated in one fell swoop, except for the
//    first block, which is kept around for reuse.
// ----------------------------------------------------------------------

void UnsafeArena::FreeBlocks() {
  for ( int i = 1; i < blocks_alloced_; ++i ) {  // keep first block alloced
    free(first_blocks_[i].mem);
    first_blocks_[i].mem = NULL;
    first_blocks_[i].size = 0;
  }
  blocks_alloced_ = 1;
  if (overflow_blocks_ != NULL) {
    vector<AllocatedBlock>::iterator it;
    for (it = overflow_blocks_->begin(); it != overflow_blocks_->end(); ++it) {
      free(it->mem);
    }
    delete overflow_blocks_;             // These should be used very rarely
    overflow_blocks_ = NULL;
  }
}

}  // namespace re2