ASTVector.h revision 05ed1a0587edcf3b8ee84a67d8c8ca76753d1fc1
1//===- ASTVector.h - Vector that uses ASTContext for allocation  --*- C++ -*-=//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
//  This file provides ASTVector, a vector ADT whose contents are
//  allocated using the allocator associated with an ASTContext.
12//
13//===----------------------------------------------------------------------===//
14
15// FIXME: Most of this is copy-and-paste from BumpVector.h and SmallVector.h.
16// We can refactor this core logic into something common.
17
18#ifndef LLVM_CLANG_AST_VECTOR
19#define LLVM_CLANG_AST_VECTOR
20
21#include "clang/AST/AttrIterator.h"
22#include "llvm/ADT/PointerIntPair.h"
23#include "llvm/Support/Allocator.h"
24#include "llvm/Support/type_traits.h"
25#include <algorithm>
26#include <cstring>
27#include <memory>
28
#ifdef _MSC_VER
namespace std {
#if _MSC_VER <= 1310
  // Work around flawed VC++ implementation of std::uninitialized_copy.  Define
  // additional overloads so that elements with pointer types are recognized as
  // scalars and not objects, causing bizarre type conversion errors.
  // (_MSC_VER 1310 is Visual C++ 7.1 / Visual Studio .NET 2003; _Ptr_cat is an
  // internal MSVC STL dispatch hook that selects the scalar fast path.)
  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1 **, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }

  // Overload for pointer-to-const-pointer sources (e.g. copying T* const* to
  // T**), which the stock implementation also fails to classify as scalar.
  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1* const *, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }
#else
  // FIXME: It is not clear if the problem is fixed in VS 2005.  What is clear
  // is that the above hack won't work if it wasn't fixed.
#endif
}
#endif
52
53namespace clang {
54  class ASTContext;
55
56template<typename T>
57class ASTVector {
58  T *Begin, *End, *Capacity;
59
60  void setEnd(T *P) { this->End = P; }
61
62public:
63  // Default ctor - Initialize to empty.
64  ASTVector() : Begin(NULL), End(NULL), Capacity(NULL) { }
65
66  ASTVector(const ASTContext &C, unsigned N)
67  : Begin(NULL), End(NULL), Capacity(NULL) {
68    reserve(C, N);
69  }
70
71  ~ASTVector() {
72    if (llvm::is_class<T>::value) {
73      // Destroy the constructed elements in the vector.
74      destroy_range(Begin, End);
75    }
76  }
77
78  typedef size_t size_type;
79  typedef ptrdiff_t difference_type;
80  typedef T value_type;
81  typedef T* iterator;
82  typedef const T* const_iterator;
83
84  typedef std::reverse_iterator<const_iterator>  const_reverse_iterator;
85  typedef std::reverse_iterator<iterator>  reverse_iterator;
86
87  typedef T& reference;
88  typedef const T& const_reference;
89  typedef T* pointer;
90  typedef const T* const_pointer;
91
92  // forward iterator creation methods.
93  iterator begin() { return Begin; }
94  const_iterator begin() const { return Begin; }
95  iterator end() { return End; }
96  const_iterator end() const { return End; }
97
98  // reverse iterator creation methods.
99  reverse_iterator rbegin()            { return reverse_iterator(end()); }
100  const_reverse_iterator rbegin() const{ return const_reverse_iterator(end()); }
101  reverse_iterator rend()              { return reverse_iterator(begin()); }
102  const_reverse_iterator rend() const { return const_reverse_iterator(begin());}
103
104  bool empty() const { return Begin == End; }
105  size_type size() const { return End-Begin; }
106
107  reference operator[](unsigned idx) {
108    assert(Begin + idx < End);
109    return Begin[idx];
110  }
111  const_reference operator[](unsigned idx) const {
112    assert(Begin + idx < End);
113    return Begin[idx];
114  }
115
116  reference front() {
117    return begin()[0];
118  }
119  const_reference front() const {
120    return begin()[0];
121  }
122
123  reference back() {
124    return end()[-1];
125  }
126  const_reference back() const {
127    return end()[-1];
128  }
129
130  void pop_back() {
131    --End;
132    End->~T();
133  }
134
135  T pop_back_val() {
136    T Result = back();
137    pop_back();
138    return Result;
139  }
140
141  void clear() {
142    if (llvm::is_class<T>::value) {
143      destroy_range(Begin, End);
144    }
145    End = Begin;
146  }
147
148  /// data - Return a pointer to the vector's buffer, even if empty().
149  pointer data() {
150    return pointer(Begin);
151  }
152
153  /// data - Return a pointer to the vector's buffer, even if empty().
154  const_pointer data() const {
155    return const_pointer(Begin);
156  }
157
158  void push_back(const_reference Elt, const ASTContext &C) {
159    if (End < Capacity) {
160    Retry:
161      new (End) T(Elt);
162      ++End;
163      return;
164    }
165    grow(C);
166    goto Retry;
167  }
168
169  void reserve(const ASTContext &C, unsigned N) {
170    if (unsigned(Capacity-Begin) < N)
171      grow(C, N);
172  }
173
174  /// capacity - Return the total number of elements in the currently allocated
175  /// buffer.
176  size_t capacity() const { return Capacity - Begin; }
177
178  /// append - Add the specified range to the end of the SmallVector.
179  ///
180  template<typename in_iter>
181  void append(const ASTContext &C, in_iter in_start, in_iter in_end) {
182    size_type NumInputs = std::distance(in_start, in_end);
183
184    if (NumInputs == 0)
185      return;
186
187    // Grow allocated space if needed.
188    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
189      this->grow(C, this->size()+NumInputs);
190
191    // Copy the new elements over.
192    // TODO: NEED To compile time dispatch on whether in_iter is a random access
193    // iterator to use the fast uninitialized_copy.
194    std::uninitialized_copy(in_start, in_end, this->end());
195    this->setEnd(this->end() + NumInputs);
196  }
197
198  /// append - Add the specified range to the end of the SmallVector.
199  ///
200  void append(const ASTContext &C, size_type NumInputs, const T &Elt) {
201    // Grow allocated space if needed.
202    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
203      this->grow(C, this->size()+NumInputs);
204
205    // Copy the new elements over.
206    std::uninitialized_fill_n(this->end(), NumInputs, Elt);
207    this->setEnd(this->end() + NumInputs);
208  }
209
210  /// uninitialized_copy - Copy the range [I, E) onto the uninitialized memory
211  /// starting with "Dest", constructing elements into it as needed.
212  template<typename It1, typename It2>
213  static void uninitialized_copy(It1 I, It1 E, It2 Dest) {
214    std::uninitialized_copy(I, E, Dest);
215  }
216
217  iterator insert(const ASTContext &C, iterator I, const T &Elt) {
218    if (I == this->end()) {  // Important special case for empty vector.
219      push_back(Elt, C);
220      return this->end()-1;
221    }
222
223    if (this->End < this->Capacity) {
224    Retry:
225      new (this->end()) T(this->back());
226      this->setEnd(this->end()+1);
227      // Push everything else over.
228      std::copy_backward(I, this->end()-1, this->end());
229      *I = Elt;
230      return I;
231    }
232    size_t EltNo = I-this->begin();
233    this->grow(C);
234    I = this->begin()+EltNo;
235    goto Retry;
236  }
237
238  iterator insert(const ASTContext &C, iterator I, size_type NumToInsert,
239                  const T &Elt) {
240    if (I == this->end()) {  // Important special case for empty vector.
241      append(C, NumToInsert, Elt);
242      return this->end()-1;
243    }
244
245    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
246    size_t InsertElt = I - this->begin();
247
248    // Ensure there is enough space.
249    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));
250
251    // Uninvalidate the iterator.
252    I = this->begin()+InsertElt;
253
254    // If there are more elements between the insertion point and the end of the
255    // range than there are being inserted, we can use a simple approach to
256    // insertion.  Since we already reserved space, we know that this won't
257    // reallocate the vector.
258    if (size_t(this->end()-I) >= NumToInsert) {
259      T *OldEnd = this->end();
260      append(C, this->end()-NumToInsert, this->end());
261
262      // Copy the existing elements that get replaced.
263      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);
264
265      std::fill_n(I, NumToInsert, Elt);
266      return I;
267    }
268
269    // Otherwise, we're inserting more elements than exist already, and we're
270    // not inserting at the end.
271
272    // Copy over the elements that we're about to overwrite.
273    T *OldEnd = this->end();
274    this->setEnd(this->end() + NumToInsert);
275    size_t NumOverwritten = OldEnd-I;
276    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);
277
278    // Replace the overwritten part.
279    std::fill_n(I, NumOverwritten, Elt);
280
281    // Insert the non-overwritten middle part.
282    std::uninitialized_fill_n(OldEnd, NumToInsert-NumOverwritten, Elt);
283    return I;
284  }
285
286  template<typename ItTy>
287  iterator insert(const ASTContext &C, iterator I, ItTy From, ItTy To) {
288    if (I == this->end()) {  // Important special case for empty vector.
289      append(C, From, To);
290      return this->end()-1;
291    }
292
293    size_t NumToInsert = std::distance(From, To);
294    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
295    size_t InsertElt = I - this->begin();
296
297    // Ensure there is enough space.
298    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));
299
300    // Uninvalidate the iterator.
301    I = this->begin()+InsertElt;
302
303    // If there are more elements between the insertion point and the end of the
304    // range than there are being inserted, we can use a simple approach to
305    // insertion.  Since we already reserved space, we know that this won't
306    // reallocate the vector.
307    if (size_t(this->end()-I) >= NumToInsert) {
308      T *OldEnd = this->end();
309      append(C, this->end()-NumToInsert, this->end());
310
311      // Copy the existing elements that get replaced.
312      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);
313
314      std::copy(From, To, I);
315      return I;
316    }
317
318    // Otherwise, we're inserting more elements than exist already, and we're
319    // not inserting at the end.
320
321    // Copy over the elements that we're about to overwrite.
322    T *OldEnd = this->end();
323    this->setEnd(this->end() + NumToInsert);
324    size_t NumOverwritten = OldEnd-I;
325    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);
326
327    // Replace the overwritten part.
328    for (; NumOverwritten > 0; --NumOverwritten) {
329      *I = *From;
330      ++I; ++From;
331    }
332
333    // Insert the non-overwritten middle part.
334    this->uninitialized_copy(From, To, OldEnd);
335    return I;
336  }
337
338  void resize(const ASTContext &C, unsigned N, const T &NV) {
339    if (N < this->size()) {
340      this->destroy_range(this->begin()+N, this->end());
341      this->setEnd(this->begin()+N);
342    } else if (N > this->size()) {
343      if (this->capacity() < N)
344        this->grow(C, N);
345      construct_range(this->end(), this->begin()+N, NV);
346      this->setEnd(this->begin()+N);
347    }
348  }
349
350private:
351  /// grow - double the size of the allocated memory, guaranteeing space for at
352  /// least one more element or MinSize if specified.
353  void grow(const ASTContext &C, size_type MinSize = 1);
354
355  void construct_range(T *S, T *E, const T &Elt) {
356    for (; S != E; ++S)
357      new (S) T(Elt);
358  }
359
360  void destroy_range(T *S, T *E) {
361    while (S != E) {
362      --E;
363      E->~T();
364    }
365  }
366
367protected:
368  iterator capacity_ptr() { return (iterator)this->Capacity; }
369};
370
371// Define this out-of-line to dissuade the C++ compiler from inlining it.
372template <typename T>
373void ASTVector<T>::grow(const ASTContext &C, size_t MinSize) {
374  size_t CurCapacity = Capacity-Begin;
375  size_t CurSize = size();
376  size_t NewCapacity = 2*CurCapacity;
377  if (NewCapacity < MinSize)
378    NewCapacity = MinSize;
379
380  // Allocate the memory from the ASTContext.
381  T *NewElts = new (C, llvm::alignOf<T>()) T[NewCapacity];
382
383  // Copy the elements over.
384  if (llvm::is_class<T>::value) {
385    std::uninitialized_copy(Begin, End, NewElts);
386    // Destroy the original elements.
387    destroy_range(Begin, End);
388  }
389  else {
390    // Use memcpy for PODs (std::uninitialized_copy optimizes to memmove).
391    memcpy(NewElts, Begin, CurSize * sizeof(T));
392  }
393
394  // ASTContext never frees any memory.
395  Begin = NewElts;
396  End = NewElts+CurSize;
397  Capacity = Begin+NewCapacity;
398}
399
400} // end: clang namespace
401#endif
402