ASTVector.h revision 9c9bd84383f742513b3cfd656948bab16d752937
//===- ASTVector.h - Vector that uses ASTContext for allocation  --*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file provides ASTVector, a vector ADT whose contents are
//  allocated using the allocator associated with an ASTContext.
//
//===----------------------------------------------------------------------===//

// FIXME: Most of this is copy-and-paste from BumpVector.h and SmallVector.h.
// We can refactor this core logic into something common.

#ifndef LLVM_CLANG_AST_VECTOR
#define LLVM_CLANG_AST_VECTOR

#include "llvm/Support/type_traits.h"
#include "llvm/Support/Allocator.h"
#include "llvm/ADT/PointerIntPair.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstring>
#include <memory>

#ifdef _MSC_VER
namespace std {
#if _MSC_VER <= 1310
  // Work around flawed VC++ implementation of std::uninitialized_copy.  Define
  // additional overloads so that elements with pointer types are recognized as
  // scalars and not objects, causing bizarre type conversion errors.
  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1 **, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }

  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1* const *, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }
#else
  // FIXME: It is not clear if the problem is fixed in VS 2005.  What is clear
  // is that the above hack won't work if it wasn't fixed.
#endif
}
#endif

namespace clang {

class ASTContext;
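
/// ASTVector - A vector whose storage is obtained from an ASTContext's
/// allocator; every operation that may allocate therefore takes the
/// ASTContext as an explicit argument.
///
/// Illustrative sketch, assuming an ASTContext &Ctx and an element value V
/// are in scope:
///
/// \code
///   ASTVector<Expr *> Vec(Ctx, /*N=*/4); // reserve room for four elements
///   Vec.push_back(V, Ctx);               // may grow via Ctx's allocator
///   for (ASTVector<Expr *>::iterator I = Vec.begin(), E = Vec.end();
///        I != E; ++I)
///     (void)*I;                          // iterate like a std::vector
/// \endcode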
template<typename T>
class ASTVector {
  T *Begin, *End, *Capacity;

  void setEnd(T *P) { this->End = P; }

public:
  // Construct an empty vector, optionally reserving space for N elements.
  explicit ASTVector(ASTContext &C, unsigned N = 0)
  : Begin(NULL), End(NULL), Capacity(NULL) {
    reserve(C, N);
  }

  ~ASTVector() {
    if (llvm::is_class<T>::value) {
      // Destroy the constructed elements in the vector.
      destroy_range(Begin, End);
    }
  }

  typedef size_t size_type;
  typedef ptrdiff_t difference_type;
  typedef T value_type;
  typedef T* iterator;
  typedef const T* const_iterator;

  typedef std::reverse_iterator<const_iterator>  const_reverse_iterator;
  typedef std::reverse_iterator<iterator>  reverse_iterator;

  typedef T& reference;
  typedef const T& const_reference;
  typedef T* pointer;
  typedef const T* const_pointer;

  // forward iterator creation methods.
  iterator begin() { return Begin; }
  const_iterator begin() const { return Begin; }
  iterator end() { return End; }
  const_iterator end() const { return End; }

  // reverse iterator creation methods.
  reverse_iterator rbegin()            { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const{ return const_reverse_iterator(end()); }
  reverse_iterator rend()              { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const { return const_reverse_iterator(begin());}

  bool empty() const { return Begin == End; }
  size_type size() const { return End-Begin; }

  reference operator[](unsigned idx) {
    assert(Begin + idx < End);
    return Begin[idx];
  }
  const_reference operator[](unsigned idx) const {
    assert(Begin + idx < End);
    return Begin[idx];
  }

  reference front() {
    return begin()[0];
  }
  const_reference front() const {
    return begin()[0];
  }

  reference back() {
    return end()[-1];
  }
  const_reference back() const {
    return end()[-1];
  }

  void pop_back() {
    --End;
    End->~T();
  }

  T pop_back_val() {
    T Result = back();
    pop_back();
    return Result;
  }

  void clear() {
    if (llvm::is_class<T>::value) {
      destroy_range(Begin, End);
    }
    End = Begin;
  }

  /// data - Return a pointer to the vector's buffer, even if empty().
  pointer data() {
    return pointer(Begin);
  }

  /// data - Return a pointer to the vector's buffer, even if empty().
  const_pointer data() const {
    return const_pointer(Begin);
  }

  void push_back(const_reference Elt, ASTContext &C) {
    if (End < Capacity) {
    Retry:
      new (End) T(Elt);
      ++End;
      return;
    }
    grow(C);
    goto Retry;
  }
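
  // Note on push_back above: the common case (spare capacity) falls straight
  // into the in-place construction at the Retry label; only a full buffer pays
  // for the grow() call before jumping back to construct the element.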

  void reserve(ASTContext &C, unsigned N) {
    if (unsigned(Capacity-Begin) < N)
      grow(C, N);
  }

  /// capacity - Return the total number of elements in the currently allocated
  /// buffer.
  size_t capacity() const { return Capacity - Begin; }

  /// append - Add the specified range to the end of the vector.
  ///
  template<typename in_iter>
  void append(ASTContext &C, in_iter in_start, in_iter in_end) {
    size_type NumInputs = std::distance(in_start, in_end);
    // Grow allocated space if needed.
    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
      this->grow(C, this->size()+NumInputs);

    // Copy the new elements over.
    // TODO: Dispatch at compile time on whether in_iter is a random access
    // iterator so the fast uninitialized_copy can be used.
    std::uninitialized_copy(in_start, in_end, this->end());
    this->setEnd(this->end() + NumInputs);
  }

  /// append - Add NumInputs copies of Elt to the end of the vector.
  ///
  void append(ASTContext &C, size_type NumInputs, const T &Elt) {
    // Grow allocated space if needed.
    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
      this->grow(C, this->size()+NumInputs);

    // Construct the new elements.
    std::uninitialized_fill_n(this->end(), NumInputs, Elt);
    this->setEnd(this->end() + NumInputs);
  }

  /// uninitialized_copy - Copy the range [I, E) onto the uninitialized memory
  /// starting with "Dest", constructing elements into it as needed.
  template<typename It1, typename It2>
  static void uninitialized_copy(It1 I, It1 E, It2 Dest) {
    std::uninitialized_copy(I, E, Dest);
  }

  iterator insert(ASTContext &C, iterator I, const T &Elt) {
    if (I == this->end()) {  // Important special case for empty vector.
      push_back(Elt, C);
      return this->end()-1;
    }

    if (this->End < this->Capacity) {
    Retry:
      new (this->end()) T(this->back());
      this->setEnd(this->end()+1);
      // Push everything else over.
      std::copy_backward(I, this->end()-1, this->end());
      *I = Elt;
      return I;
    }
    size_t EltNo = I-this->begin();
    this->grow(C);
    I = this->begin()+EltNo;
    goto Retry;
  }

  iterator insert(ASTContext &C, iterator I, size_type NumToInsert,
                  const T &Elt) {
    if (I == this->end()) {  // Important special case for empty vector.
      append(C, NumToInsert, Elt);
      return this->end()-NumToInsert;
    }

    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    // Ensure there is enough space.
    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));

    // Recompute the iterator, since reserve() may have invalidated it.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // range than there are being inserted, we can use a simple approach to
    // insertion.  Since we already reserved space, we know that this won't
    // reallocate the vector.
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(C, this->end()-NumToInsert, this->end());

      // Copy the existing elements that get replaced.
      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);

      std::fill_n(I, NumToInsert, Elt);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Copy over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->setEnd(this->end() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);

    // Replace the overwritten part.
    std::fill_n(I, NumOverwritten, Elt);

    // Insert the non-overwritten middle part.
    std::uninitialized_fill_n(OldEnd, NumToInsert-NumOverwritten, Elt);
    return I;
  }
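
  // Illustration of the two insertion paths above: with size() == 5 and I at
  // index 1, inserting NumToInsert == 2 leaves end()-I == 4 >= 2, so the tail
  // is shifted within already-constructed storage and the gap is filled in
  // place; inserting NumToInsert == 10 instead takes the second path, which
  // copy-constructs the four displaced elements into uninitialized memory past
  // the old end and fills the remaining slots with Elt.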

  template<typename ItTy>
  iterator insert(ASTContext &C, iterator I, ItTy From, ItTy To) {
    size_t NumToInsert = std::distance(From, To);
    if (I == this->end()) {  // Important special case for empty vector.
      append(C, From, To);
      return this->end()-NumToInsert;
    }

    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    // Ensure there is enough space.
    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));

    // Recompute the iterator, since reserve() may have invalidated it.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // range than there are being inserted, we can use a simple approach to
    // insertion.  Since we already reserved space, we know that this won't
    // reallocate the vector.
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(C, this->end()-NumToInsert, this->end());

      // Copy the existing elements that get replaced.
      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);

      std::copy(From, To, I);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Copy over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->setEnd(this->end() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);

    // Replace the overwritten part.
    for (; NumOverwritten > 0; --NumOverwritten) {
      *I = *From;
      ++I; ++From;
    }

    // Insert the non-overwritten middle part.
    this->uninitialized_copy(From, To, OldEnd);
    return I;
  }

  void resize(ASTContext &C, unsigned N, const T &NV) {
    if (N < this->size()) {
      this->destroy_range(this->begin()+N, this->end());
      this->setEnd(this->begin()+N);
    } else if (N > this->size()) {
      if (this->capacity() < N)
        this->grow(C, N);
      construct_range(this->end(), this->begin()+N, NV);
      this->setEnd(this->begin()+N);
    }
  }

private:
  /// grow - double the size of the allocated memory, guaranteeing space for at
  /// least one more element or MinSize if specified.
  void grow(ASTContext &C, size_type MinSize = 1);

  void construct_range(T *S, T *E, const T &Elt) {
    for (; S != E; ++S)
      new (S) T(Elt);
  }

  void destroy_range(T *S, T *E) {
    while (S != E) {
      --E;
      E->~T();
    }
  }

protected:
  iterator capacity_ptr() { return (iterator)this->Capacity; }
};

// Define this out-of-line to dissuade the C++ compiler from inlining it.
template <typename T>
void ASTVector<T>::grow(ASTContext &C, size_t MinSize) {
  size_t CurCapacity = Capacity-Begin;
  size_t CurSize = size();
  size_t NewCapacity = 2*CurCapacity;
  if (NewCapacity < MinSize)
    NewCapacity = MinSize;

  // Allocate the memory from the ASTContext.
  T *NewElts = new (C) T[NewCapacity];

  // Copy the elements over.
  if (llvm::is_class<T>::value) {
    std::uninitialized_copy(Begin, End, NewElts);
    // Destroy the original elements.
    destroy_range(Begin, End);
  }
  else {
    // Use memcpy for PODs (std::uninitialized_copy optimizes to memmove).
    memcpy(NewElts, Begin, CurSize * sizeof(T));
  }

  C.Deallocate(Begin);
  Begin = NewElts;
  End = NewElts+CurSize;
  Capacity = Begin+NewCapacity;
}
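
// For example, with CurCapacity == 3, grow(C) doubles to NewCapacity == 6,
// while grow(C, 10) yields NewCapacity == 10 because the doubled capacity (6)
// would still be smaller than MinSize.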

} // end namespace clang

#endif // LLVM_CLANG_AST_VECTOR