ASTVector.h revision 2a82ca255b0f99f6201a75ed52b91fc024f6e9cf
//===- ASTVector.h - Vector that uses ASTContext for allocation  --*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file provides ASTVector, a vector ADT whose contents are
//  allocated using the allocator associated with an ASTContext.
//
//===----------------------------------------------------------------------===//

// FIXME: Most of this is copy-and-paste from BumpVector.h and SmallVector.h.
// We can refactor this core logic into something common.

#ifndef LLVM_CLANG_AST_VECTOR
#define LLVM_CLANG_AST_VECTOR

#include "llvm/Support/type_traits.h"
#include "llvm/Support/Allocator.h"
#include "llvm/ADT/PointerIntPair.h"
#include <algorithm>
#include <memory>
#include <cstring>
#include <cassert>   // for assert() used in element access below
#include <iterator>  // for std::reverse_iterator and std::distance

#ifdef _MSC_VER
namespace std {
#if _MSC_VER <= 1310
  // Work around flawed VC++ implementation of std::uninitialized_copy.  Define
  // additional overloads so that elements with pointer types are recognized as
  // scalars and not objects, which would otherwise cause bizarre type
  // conversion errors.
  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1 **, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }

  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1* const *, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }
#else
  // FIXME: It is not clear if the problem is fixed in VS 2005.  What is clear
  // is that the above hack won't work if it wasn't fixed.
#endif
}
#endif

namespace clang {

class ASTContext;

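/// ASTVector - A vector whose storage is allocated from an ASTContext and is
/// therefore never explicitly freed; the destructor only destroys constructed
/// elements.
///
/// Minimal usage sketch (illustrative only; `Ctx`, `SomeStmt`, and `consume`
/// are hypothetical names supplied by the caller, not declared in this
/// header):
///
///   ASTVector<Stmt *> Stmts;
///   Stmts.reserve(Ctx, 4);           // pre-allocate from Ctx's allocator
///   Stmts.push_back(SomeStmt, Ctx);  // note: the ASTContext is the 2nd arg
///   for (ASTVector<Stmt *>::iterator I = Stmts.begin(), E = Stmts.end();
///        I != E; ++I)
///     consume(*I);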
template<typename T>
class ASTVector {
  T *Begin, *End, *Capacity;

  void setEnd(T *P) { this->End = P; }

public:
  // Default ctor - Initialize to empty.
  ASTVector() : Begin(NULL), End(NULL), Capacity(NULL) { }

  ASTVector(ASTContext &C, unsigned N)
  : Begin(NULL), End(NULL), Capacity(NULL) {
    reserve(C, N);
  }

  ~ASTVector() {
    if (llvm::is_class<T>::value) {
      // Destroy the constructed elements in the vector.
      destroy_range(Begin, End);
    }
  }

  typedef size_t size_type;
  typedef ptrdiff_t difference_type;
  typedef T value_type;
  typedef T* iterator;
  typedef const T* const_iterator;

  typedef std::reverse_iterator<const_iterator>  const_reverse_iterator;
  typedef std::reverse_iterator<iterator>  reverse_iterator;

  typedef T& reference;
  typedef const T& const_reference;
  typedef T* pointer;
  typedef const T* const_pointer;

  // forward iterator creation methods.
  iterator begin() { return Begin; }
  const_iterator begin() const { return Begin; }
  iterator end() { return End; }
  const_iterator end() const { return End; }

  // reverse iterator creation methods.
  reverse_iterator rbegin()            { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const{ return const_reverse_iterator(end()); }
  reverse_iterator rend()              { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const { return const_reverse_iterator(begin());}

  bool empty() const { return Begin == End; }
  size_type size() const { return End-Begin; }

  reference operator[](unsigned idx) {
    assert(Begin + idx < End);
    return Begin[idx];
  }
  const_reference operator[](unsigned idx) const {
    assert(Begin + idx < End);
    return Begin[idx];
  }

  reference front() {
    return begin()[0];
  }
  const_reference front() const {
    return begin()[0];
  }

  reference back() {
    return end()[-1];
  }
  const_reference back() const {
    return end()[-1];
  }

  void pop_back() {
    --End;
    End->~T();
  }

  T pop_back_val() {
    T Result = back();
    pop_back();
    return Result;
  }

  void clear() {
    if (llvm::is_class<T>::value) {
      destroy_range(Begin, End);
    }
    End = Begin;
  }

  /// data - Return a pointer to the vector's buffer, even if empty().
  pointer data() {
    return pointer(Begin);
  }

  /// data - Return a pointer to the vector's buffer, even if empty().
  const_pointer data() const {
    return const_pointer(Begin);
  }

  void push_back(const_reference Elt, ASTContext &C) {
    if (End < Capacity) {
    Retry:
      new (End) T(Elt);
      ++End;
      return;
    }
    grow(C);
    goto Retry;
  }

  void reserve(ASTContext &C, unsigned N) {
    if (unsigned(Capacity-Begin) < N)
      grow(C, N);
  }

  /// capacity - Return the total number of elements in the currently allocated
  /// buffer.
  size_t capacity() const { return Capacity - Begin; }

  /// append - Add the specified range to the end of the vector.
  ///
  template<typename in_iter>
  void append(ASTContext &C, in_iter in_start, in_iter in_end) {
    size_type NumInputs = std::distance(in_start, in_end);

    if (NumInputs == 0)
      return;

    // Grow allocated space if needed.
    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
      this->grow(C, this->size()+NumInputs);

    // Copy the new elements over.
    // TODO: We need compile-time dispatch on whether in_iter is a random-access
    // iterator to use the fast uninitialized_copy.
    std::uninitialized_copy(in_start, in_end, this->end());
    this->setEnd(this->end() + NumInputs);
  }

  /// append - Add NumInputs copies of Elt to the end of the vector.
  ///
  void append(ASTContext &C, size_type NumInputs, const T &Elt) {
    // Grow allocated space if needed.
    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
      this->grow(C, this->size()+NumInputs);

    // Copy the new elements over.
    std::uninitialized_fill_n(this->end(), NumInputs, Elt);
    this->setEnd(this->end() + NumInputs);
  }
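
  // Illustrative usage of the two append overloads (names `C`, `Src`, and
  // `Val` are hypothetical, supplied by the caller):
  //   V.append(C, Src.begin(), Src.end()); // copy an existing range
  //   V.append(C, 3, Val);                 // add three copies of a value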

  /// uninitialized_copy - Copy the range [I, E) onto the uninitialized memory
  /// starting with "Dest", constructing elements into it as needed.
  template<typename It1, typename It2>
  static void uninitialized_copy(It1 I, It1 E, It2 Dest) {
    std::uninitialized_copy(I, E, Dest);
  }

  iterator insert(ASTContext &C, iterator I, const T &Elt) {
    if (I == this->end()) {  // Important special case for empty vector.
      push_back(Elt, C);
      return this->end()-1;
    }

    if (this->End < this->Capacity) {
    Retry:
      new (this->end()) T(this->back());
      this->setEnd(this->end()+1);
      // Push everything else over.
      std::copy_backward(I, this->end()-1, this->end());
      *I = Elt;
      return I;
    }
    size_t EltNo = I-this->begin();
    this->grow(C);
    I = this->begin()+EltNo;
    goto Retry;
  }

  iterator insert(ASTContext &C, iterator I, size_type NumToInsert,
                  const T &Elt) {
    if (I == this->end()) {  // Important special case for empty vector.
      append(C, NumToInsert, Elt);
      return this->end()-1;
    }

    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    // Ensure there is enough space.
    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));

    // Uninvalidate the iterator.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // range than there are being inserted, we can use a simple approach to
    // insertion.  Since we already reserved space, we know that this won't
    // reallocate the vector.
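    // Illustrative trace (hypothetical values, not from the original source):
    // with elements [a,b,c,d,e], inserting 2 copies of x before 'c' leaves 3
    // trailing elements, which is >= 2, so the branch below append-copies d,e
    // past the end, shifts c toward the back with copy_backward, and fills the
    // gap, giving [a,b,x,x,c,d,e].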
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(C, this->end()-NumToInsert, this->end());

      // Copy the existing elements that get replaced.
      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);

      std::fill_n(I, NumToInsert, Elt);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Copy over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->setEnd(this->end() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);

    // Replace the overwritten part.
    std::fill_n(I, NumOverwritten, Elt);

    // Insert the non-overwritten middle part.
    std::uninitialized_fill_n(OldEnd, NumToInsert-NumOverwritten, Elt);
    return I;
  }

  template<typename ItTy>
  iterator insert(ASTContext &C, iterator I, ItTy From, ItTy To) {
    if (I == this->end()) {  // Important special case for empty vector.
      append(C, From, To);
      return this->end()-1;
    }

    size_t NumToInsert = std::distance(From, To);
    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    // Ensure there is enough space.
    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));

    // Uninvalidate the iterator.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // range than there are being inserted, we can use a simple approach to
    // insertion.  Since we already reserved space, we know that this won't
    // reallocate the vector.
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(C, this->end()-NumToInsert, this->end());

      // Copy the existing elements that get replaced.
      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);

      std::copy(From, To, I);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Copy over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->setEnd(this->end() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);

    // Replace the overwritten part.
    for (; NumOverwritten > 0; --NumOverwritten) {
      *I = *From;
      ++I; ++From;
    }

    // Insert the non-overwritten middle part.
    this->uninitialized_copy(From, To, OldEnd);
    return I;
  }

  void resize(ASTContext &C, unsigned N, const T &NV) {
    if (N < this->size()) {
      this->destroy_range(this->begin()+N, this->end());
      this->setEnd(this->begin()+N);
    } else if (N > this->size()) {
      if (this->capacity() < N)
        this->grow(C, N);
      construct_range(this->end(), this->begin()+N, NV);
      this->setEnd(this->begin()+N);
    }
  }

private:
  /// grow - double the size of the allocated memory, guaranteeing space for at
  /// least one more element or MinSize if specified.
  void grow(ASTContext &C, size_type MinSize = 1);

  void construct_range(T *S, T *E, const T &Elt) {
    for (; S != E; ++S)
      new (S) T(Elt);
  }

  void destroy_range(T *S, T *E) {
    while (S != E) {
      --E;
      E->~T();
    }
  }

protected:
  iterator capacity_ptr() { return (iterator)this->Capacity; }
};

// Define this out-of-line to dissuade the C++ compiler from inlining it.
template <typename T>
void ASTVector<T>::grow(ASTContext &C, size_t MinSize) {
  size_t CurCapacity = Capacity-Begin;
  size_t CurSize = size();
  size_t NewCapacity = 2*CurCapacity;
  if (NewCapacity < MinSize)
    NewCapacity = MinSize;

  // Allocate the memory from the ASTContext.
  T *NewElts = new (C, llvm::alignOf<T>()) T[NewCapacity];

  // Copy the elements over.
  if (llvm::is_class<T>::value) {
    std::uninitialized_copy(Begin, End, NewElts);
    // Destroy the original elements.
    destroy_range(Begin, End);
  }
  else {
    // Use memcpy for PODs (std::uninitialized_copy optimizes to memmove).
    memcpy(NewElts, Begin, CurSize * sizeof(T));
  }

  // ASTContext never frees any memory.
  Begin = NewElts;
  End = NewElts+CurSize;
  Capacity = Begin+NewCapacity;
}
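
// Growth sketch (illustrative, not part of the original source): starting
// from an empty vector, repeated push_back calls take the capacity through
// 0 -> 1 -> 2 -> 4 -> 8 -> ..., doubling each time; reserve(C, N) jumps
// straight to N whenever N exceeds twice the current capacity.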

} // end: clang namespace
#endif
