ASTVector.h revision 478851c3ed6bd784e7377dffd8e57b200c1b9ba9
//===- ASTVector.h - Vector that uses ASTContext for allocation  --*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file provides ASTVector, a vector ADT whose contents are
//  allocated using the allocator associated with an ASTContext.
//
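// Example usage (an illustrative sketch; it assumes an ASTContext &Ctx is in
// scope and that clang/AST/ASTContext.h has been included by the client, since
// growing the vector allocates through the context):
//
//    ASTVector<unsigned> V(Ctx, /*N=*/4);   // reserve space for 4 elements
//    V.push_back(1, Ctx);                   // may grow, allocating from Ctx
//    V.push_back(2, Ctx);
//    for (ASTVector<unsigned>::iterator I = V.begin(), E = V.end();
//         I != E; ++I)
//      use(*I);                             // use() is a hypothetical client
//
// Note that the underlying buffer is never freed; it lives as long as the
// ASTContext itself.
//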
//===----------------------------------------------------------------------===//

// FIXME: Most of this is copy-and-paste from BumpVector.h and SmallVector.h.
// We can refactor this core logic into something common.

#ifndef LLVM_CLANG_AST_VECTOR
#define LLVM_CLANG_AST_VECTOR

#include "llvm/Support/type_traits.h"
#include "llvm/Support/Allocator.h"
#include "llvm/ADT/PointerIntPair.h"
#include <algorithm>
#include <cassert>
#include <cstring>
#include <iterator>
#include <memory>

#ifdef _MSC_VER
namespace std {
#if _MSC_VER <= 1310
  // Work around flawed VC++ implementation of std::uninitialized_copy.  Define
  // additional overloads so that elements with pointer types are recognized as
  // scalars and not objects, causing bizarre type conversion errors.
  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1 **, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }

  template<class T1, class T2>
  inline _Scalar_ptr_iterator_tag _Ptr_cat(T1* const *, T2 **) {
    _Scalar_ptr_iterator_tag _Cat;
    return _Cat;
  }
#else
  // FIXME: It is not clear if the problem is fixed in VS 2005.  What is clear
  // is that the above hack won't work if it wasn't fixed.
#endif
}
#endif

namespace clang {

class ASTContext;

template<typename T>
class ASTVector {
  T *Begin, *End, *Capacity;

  void setEnd(T *P) { this->End = P; }

public:
  // Default ctor - Initialize to empty.
  explicit ASTVector(ASTContext &C, unsigned N = 0)
  : Begin(NULL), End(NULL), Capacity(NULL) {
    reserve(C, N);
  }

  ~ASTVector() {
    if (llvm::is_class<T>::value) {
      // Destroy the constructed elements in the vector.
      destroy_range(Begin, End);
    }
  }

  typedef size_t size_type;
  typedef ptrdiff_t difference_type;
  typedef T value_type;
  typedef T* iterator;
  typedef const T* const_iterator;

  typedef std::reverse_iterator<const_iterator>  const_reverse_iterator;
  typedef std::reverse_iterator<iterator>  reverse_iterator;

  typedef T& reference;
  typedef const T& const_reference;
  typedef T* pointer;
  typedef const T* const_pointer;

  // forward iterator creation methods.
  iterator begin() { return Begin; }
  const_iterator begin() const { return Begin; }
  iterator end() { return End; }
  const_iterator end() const { return End; }

  // reverse iterator creation methods.
  reverse_iterator rbegin()            { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const{ return const_reverse_iterator(end()); }
  reverse_iterator rend()              { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const { return const_reverse_iterator(begin());}

  bool empty() const { return Begin == End; }
  size_type size() const { return End-Begin; }

  reference operator[](unsigned idx) {
    assert(Begin + idx < End);
    return Begin[idx];
  }
  const_reference operator[](unsigned idx) const {
    assert(Begin + idx < End);
    return Begin[idx];
  }

  reference front() {
    return begin()[0];
  }
  const_reference front() const {
    return begin()[0];
  }

  reference back() {
    return end()[-1];
  }
  const_reference back() const {
    return end()[-1];
  }

  void pop_back() {
    --End;
    End->~T();
  }

  T pop_back_val() {
    T Result = back();
    pop_back();
    return Result;
  }

  void clear() {
    if (llvm::is_class<T>::value) {
      destroy_range(Begin, End);
    }
    End = Begin;
  }

  /// data - Return a pointer to the vector's buffer, even if empty().
  pointer data() {
    return pointer(Begin);
  }

  /// data - Return a pointer to the vector's buffer, even if empty().
  const_pointer data() const {
    return const_pointer(Begin);
  }

  void push_back(const_reference Elt, ASTContext &C) {
    if (End < Capacity) {
    Retry:
      new (End) T(Elt);
      ++End;
      return;
    }
    // Out of space: grow the buffer (allocating from the ASTContext) and
    // retry the push.
    grow(C);
    goto Retry;
  }

  void reserve(ASTContext &C, unsigned N) {
    if (unsigned(Capacity-Begin) < N)
      grow(C, N);
  }

  /// capacity - Return the total number of elements in the currently allocated
  /// buffer.
  size_t capacity() const { return Capacity - Begin; }

  /// append - Add the specified range to the end of the vector.
  ///
  template<typename in_iter>
  void append(ASTContext &C, in_iter in_start, in_iter in_end) {
    size_type NumInputs = std::distance(in_start, in_end);

    if (NumInputs == 0)
      return;

    // Grow allocated space if needed.
    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
      this->grow(C, this->size()+NumInputs);

    // Copy the new elements over.
    // TODO: Dispatch at compile time on whether in_iter is a random access
    // iterator, so the fast uninitialized_copy can be used.
    std::uninitialized_copy(in_start, in_end, this->end());
    this->setEnd(this->end() + NumInputs);
  }

  /// append - Add NumInputs copies of Elt to the end of the vector.
  ///
  void append(ASTContext &C, size_type NumInputs, const T &Elt) {
    // Grow allocated space if needed.
    if (NumInputs > size_type(this->capacity_ptr()-this->end()))
      this->grow(C, this->size()+NumInputs);

    // Construct the new elements in place.
    std::uninitialized_fill_n(this->end(), NumInputs, Elt);
    this->setEnd(this->end() + NumInputs);
  }
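
  // Illustrative example of the append overloads (assumes an ASTVector<int> V
  // and an ASTContext &Ctx):
  //   int Arr[] = { 1, 2, 3 };
  //   V.append(Ctx, Arr, Arr + 3);      // appends 1, 2, 3
  //   V.append(Ctx, size_t(2), -1);     // appends two copies of -1; the
  //                                     // size_t count selects this overload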

  /// uninitialized_copy - Copy the range [I, E) onto the uninitialized memory
  /// starting with "Dest", constructing elements into it as needed.
  template<typename It1, typename It2>
  static void uninitialized_copy(It1 I, It1 E, It2 Dest) {
    std::uninitialized_copy(I, E, Dest);
  }

  iterator insert(ASTContext &C, iterator I, const T &Elt) {
    if (I == this->end()) {  // Important special case for empty vector.
      push_back(Elt, C);
      return this->end()-1;
    }

    if (this->End < this->Capacity) {
    Retry:
      new (this->end()) T(this->back());
      this->setEnd(this->end()+1);
      // Push everything else over.
      std::copy_backward(I, this->end()-1, this->end());
      *I = Elt;
      return I;
    }
    size_t EltNo = I-this->begin();
    this->grow(C);
    I = this->begin()+EltNo;
    goto Retry;
  }

  iterator insert(ASTContext &C, iterator I, size_type NumToInsert,
                  const T &Elt) {
    if (I == this->end()) {  // Important special case for empty vector.
      append(C, NumToInsert, Elt);
      return this->end()-NumToInsert;
    }

    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    // Ensure there is enough space.
    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));

    // Uninvalidate the iterator.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // vector than there are being inserted, we can use a simple approach to
    // insertion.  Since we already reserved space, we know that this won't
    // reallocate the vector.
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(C, this->end()-NumToInsert, this->end());

      // Copy the existing elements that get replaced.
      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);

      std::fill_n(I, NumToInsert, Elt);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Copy over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->setEnd(this->end() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);

    // Replace the overwritten part.
    std::fill_n(I, NumOverwritten, Elt);

    // Insert the non-overwritten middle part.
    std::uninitialized_fill_n(OldEnd, NumToInsert-NumOverwritten, Elt);
    return I;
  }
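
  // Illustrative example: if V holds {1, 2, 3, 4} and I points at the element
  // 2, then V.insert(Ctx, I, 2, 9) yields {1, 9, 9, 2, 3, 4} and returns an
  // iterator to the first inserted 9.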

  template<typename ItTy>
  iterator insert(ASTContext &C, iterator I, ItTy From, ItTy To) {
    if (I == this->end()) {  // Important special case for empty vector.
      size_t NumAppended = std::distance(From, To);
      append(C, From, To);
      return this->end()-NumAppended;
    }

    size_t NumToInsert = std::distance(From, To);
    // Convert iterator to elt# to avoid invalidating iterator when we reserve()
    size_t InsertElt = I - this->begin();

    // Ensure there is enough space.
    reserve(C, static_cast<unsigned>(this->size() + NumToInsert));

    // Uninvalidate the iterator.
    I = this->begin()+InsertElt;

    // If there are more elements between the insertion point and the end of the
    // vector than there are being inserted, we can use a simple approach to
    // insertion.  Since we already reserved space, we know that this won't
    // reallocate the vector.
    if (size_t(this->end()-I) >= NumToInsert) {
      T *OldEnd = this->end();
      append(C, this->end()-NumToInsert, this->end());

      // Copy the existing elements that get replaced.
      std::copy_backward(I, OldEnd-NumToInsert, OldEnd);

      std::copy(From, To, I);
      return I;
    }

    // Otherwise, we're inserting more elements than exist already, and we're
    // not inserting at the end.

    // Copy over the elements that we're about to overwrite.
    T *OldEnd = this->end();
    this->setEnd(this->end() + NumToInsert);
    size_t NumOverwritten = OldEnd-I;
    this->uninitialized_copy(I, OldEnd, this->end()-NumOverwritten);

    // Replace the overwritten part, keeping I at the first inserted element.
    for (T *J = I; NumOverwritten > 0; --NumOverwritten) {
      *J = *From;
      ++J; ++From;
    }

    // Insert the non-overwritten middle part.
    this->uninitialized_copy(From, To, OldEnd);
    return I;
  }

  void resize(ASTContext &C, unsigned N, const T &NV) {
    if (N < this->size()) {
      this->destroy_range(this->begin()+N, this->end());
      this->setEnd(this->begin()+N);
    } else if (N > this->size()) {
      if (this->capacity() < N)
        this->grow(C, N);
      construct_range(this->end(), this->begin()+N, NV);
      this->setEnd(this->begin()+N);
    }
  }

345
346private:
347  /// grow - double the size of the allocated memory, guaranteeing space for at
348  /// least one more element or MinSize if specified.
349  void grow(ASTContext &C, size_type MinSize = 1);
350
351  void construct_range(T *S, T *E, const T &Elt) {
352    for (; S != E; ++S)
353      new (S) T(Elt);
354  }
355
356  void destroy_range(T *S, T *E) {
357    while (S != E) {
358      --E;
359      E->~T();
360    }
361  }
362
363protected:
364  iterator capacity_ptr() { return (iterator)this->Capacity; }
365};
366
// Define this out-of-line to dissuade the C++ compiler from inlining it.
template <typename T>
void ASTVector<T>::grow(ASTContext &C, size_t MinSize) {
  size_t CurCapacity = Capacity-Begin;
  size_t CurSize = size();
  size_t NewCapacity = 2*CurCapacity;
  if (NewCapacity < MinSize)
    NewCapacity = MinSize;

  // Allocate the memory from the ASTContext.
  T *NewElts = new (C, llvm::alignOf<T>()) T[NewCapacity];

  // Copy the elements over.
  if (llvm::is_class<T>::value) {
    std::uninitialized_copy(Begin, End, NewElts);
    // Destroy the original elements.
    destroy_range(Begin, End);
  } else {
    // Use memcpy for PODs (std::uninitialized_copy optimizes to memmove).
    memcpy(NewElts, Begin, CurSize * sizeof(T));
  }

  // ASTContext never frees any memory.
  Begin = NewElts;
  End = NewElts+CurSize;
  Capacity = Begin+NewCapacity;
}
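
// Illustrative growth trace: with the default MinSize of 1, repeated push_back
// calls on an initially empty vector take the capacity through
// 0 -> 1 -> 2 -> 4 -> 8 -> ..., since NewCapacity = max(2*CurCapacity, MinSize).
// The old buffer is not reclaimed here; it simply remains in the ASTContext's
// allocator until the ASTContext itself is destroyed.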

} // end: clang namespace
#endif // LLVM_CLANG_AST_VECTOR