1/*
2 *  Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
3 *
4 *  This library is free software; you can redistribute it and/or
5 *  modify it under the terms of the GNU Library General Public
6 *  License as published by the Free Software Foundation; either
7 *  version 2 of the License, or (at your option) any later version.
8 *
9 *  This library is distributed in the hope that it will be useful,
10 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
11 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12 *  Library General Public License for more details.
13 *
14 *  You should have received a copy of the GNU Library General Public License
15 *  along with this library; see the file COPYING.LIB.  If not, write to
16 *  the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
17 *  Boston, MA 02110-1301, USA.
18 *
19 */
20
21#ifndef WTF_Vector_h
22#define WTF_Vector_h
23
24#include "wtf/Alignment.h"
25#include "wtf/DefaultAllocator.h"
26#include "wtf/FastAllocBase.h"
27#include "wtf/Noncopyable.h"
28#include "wtf/NotFound.h"
29#include "wtf/StdLibExtras.h"
30#include "wtf/VectorTraits.h"
31#include "wtf/WTF.h"
32#include <string.h>
33#include <utility>
34
35namespace WTF {
36
#if defined(MEMORY_SANITIZER_INITIAL_SIZE)
// Under MemorySanitizer builds, start with the smallest possible backing so
// that reads of uninitialized or out-of-range slots are caught as early as
// possible.
static const size_t kInitialVectorSize = 1;
#else
#ifndef WTF_VECTOR_INITIAL_SIZE
#define WTF_VECTOR_INITIAL_SIZE 4
#endif
// Minimum element capacity allocated the first time a vector needs storage.
static const size_t kInitialVectorSize = WTF_VECTOR_INITIAL_SIZE;
#endif
45
46    template<typename T, size_t inlineBuffer, typename Allocator>
47    class Deque;
48
49    template <bool needsDestruction, typename T>
50    struct VectorDestructor;
51
52    template<typename T>
53    struct VectorDestructor<false, T>
54    {
55        static void destruct(T*, T*) {}
56    };
57
58    template<typename T>
59    struct VectorDestructor<true, T>
60    {
61        static void destruct(T* begin, T* end)
62        {
63            for (T* cur = begin; cur != end; ++cur)
64                cur->~T();
65        }
66    };
67
68    template <bool unusedSlotsMustBeZeroed, typename T>
69    struct VectorUnusedSlotClearer;
70
71    template<typename T>
72    struct VectorUnusedSlotClearer<false, T> {
73        static void clear(T*, T*) { }
74    };
75
76    template<typename T>
77    struct VectorUnusedSlotClearer<true, T> {
78        static void clear(T* begin, T* end)
79        {
80            // We clear out unused slots so that the visitor and the finalizer
81            // do not visit them (or at least it does not matter if they do).
82            memset(begin, 0, sizeof(T) * (end - begin));
83        }
84    };
85
86    template <bool canInitializeWithMemset, typename T>
87    struct VectorInitializer;
88
89    template<typename T>
90    struct VectorInitializer<false, T>
91    {
92        static void initialize(T* begin, T* end)
93        {
94            for (T* cur = begin; cur != end; ++cur)
95                new (NotNull, cur) T;
96        }
97    };
98
99    template<typename T>
100    struct VectorInitializer<true, T>
101    {
102        static void initialize(T* begin, T* end)
103        {
104            memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
105        }
106    };
107
    // Relocates a range of elements between buffers. The memcpy/memmove
    // variant is selected when VectorTraits<T> declares the type safe to move
    // bytewise.
    template <bool canMoveWithMemcpy, typename T>
    struct VectorMover;

    template<typename T>
    struct VectorMover<false, T>
    {
        // "Move" by copy-constructing into the destination and destroying the
        // source element; the two ranges must not overlap.
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (NotNull, dst) T(*src);
                src->~T();
                ++dst;
                ++src;
            }
        }
        // Overlap-safe variant: copies front-to-back when shifting down and
        // back-to-front when shifting up, so each source element is consumed
        // before its storage can be overwritten.
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            if (src > dst)
                move(src, srcEnd, dst);
            else {
                T* dstEnd = dst + (srcEnd - src);
                while (src != srcEnd) {
                    --srcEnd;
                    --dstEnd;
                    new (NotNull, dstEnd) T(*srcEnd);
                    srcEnd->~T();
                }
            }
        }
        static void swap(T* src, T* srcEnd, T* dst)
        {
            std::swap_ranges(src, srcEnd, dst);
        }
    };

    template<typename T>
    struct VectorMover<true, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
        // Swap the raw bytes of the two ranges; valid because the traits
        // guarantee T is bytewise movable.
        static void swap(T* src, T* srcEnd, T* dst)
        {
            std::swap_ranges(reinterpret_cast<char*>(src), reinterpret_cast<char*>(srcEnd), reinterpret_cast<char*>(dst));
        }
    };
159
160    template <bool canCopyWithMemcpy, typename T>
161    struct VectorCopier;
162
163    template<typename T>
164    struct VectorCopier<false, T>
165    {
166        template<typename U>
167        static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
168        {
169            while (src != srcEnd) {
170                new (NotNull, dst) T(*src);
171                ++dst;
172                ++src;
173            }
174        }
175    };
176
177    template<typename T>
178    struct VectorCopier<true, T>
179    {
180        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
181        {
182            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
183        }
184        template<typename U>
185        static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
186        {
187            VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst);
188        }
189    };
190
    // Fills uninitialized storage with copies of a value. The memset variant
    // is only valid for single-byte element types.
    template <bool canFillWithMemset, typename T>
    struct VectorFiller;

    template<typename T>
    struct VectorFiller<false, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            while (dst != dstEnd) {
                new (NotNull, dst) T(val);
                ++dst;
            }
        }
    };

    template<typename T>
    struct VectorFiller<true, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            COMPILE_ASSERT(sizeof(T) == sizeof(char), Size_of_type_should_be_equal_to_one);
#if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
            // NOTE(review): with fortify enabled, the memset is skipped when
            // the length is a nonzero compile-time constant — this appears to
            // work around a _FORTIFY_SOURCE diagnostic in GCC builds; confirm
            // against upstream WTF before changing.
            if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
#endif
                memset(dst, val, dstEnd - dst);
        }
    };
218
219    template<bool canCompareWithMemcmp, typename T>
220    struct VectorComparer;
221
222    template<typename T>
223    struct VectorComparer<false, T>
224    {
225        static bool compare(const T* a, const T* b, size_t size)
226        {
227            if (LIKELY(a && b))
228                return std::equal(a, a + size, b);
229            return !a && !b;
230        }
231    };
232
233    template<typename T>
234    struct VectorComparer<true, T>
235    {
236        static bool compare(const T* a, const T* b, size_t size)
237        {
238            return memcmp(a, b, sizeof(T) * size) == 0;
239        }
240    };
241
    // Facade that picks the appropriate helper (destructor, initializer,
    // mover, copier, filler, comparer) based on VectorTraits<T>. All Vector
    // code routes through this so trait-driven optimizations apply uniformly.
    template<typename T>
    struct VectorTypeOperations
    {
        static void destruct(T* begin, T* end)
        {
            VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
        }

        static void initialize(T* begin, T* end)
        {
            VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
        }

        static void move(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
        }

        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
        }

        static void swap(T* src, T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, dst);
        }

        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
        }

        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
        }

        static bool compare(const T* a, const T* b, size_t size)
        {
            return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
        }
    };
285
    // Shared state and allocation logic for Vector's backing store: the
    // buffer pointer plus capacity/size bookkeeping. Allocation goes through
    // Allocator, so the backing may live on the ordinary heap or the GC heap.
    template<typename T, typename Allocator>
    class VectorBufferBase {
        WTF_MAKE_NONCOPYABLE(VectorBufferBase);
    public:
        // Allocates a new backing able to hold at least newCapacity elements.
        // Does not free any previous buffer; the caller owns that.
        void allocateBuffer(size_t newCapacity)
        {
            typedef typename Allocator::template VectorBackingHelper<T, VectorTraits<T> >::Type VectorBacking;
            ASSERT(newCapacity);
            size_t sizeToAllocate = allocationSize(newCapacity);
            m_buffer = Allocator::template backingMalloc<T*, VectorBacking>(sizeToAllocate);
            // The quantized allocation may exceed the request; record the real
            // element capacity so the extra space is usable.
            m_capacity = sizeToAllocate / sizeof(T);
        }

        // Number of bytes the allocator would hand back for this capacity.
        size_t allocationSize(size_t capacity) const
        {
            return Allocator::Quantizer::template quantizedSize<T>(capacity);
        }

        T* buffer() { return m_buffer; }
        const T* buffer() const { return m_buffer; }
        size_t capacity() const { return m_capacity; }

        // Zeroes [from, to) when the allocator is garbage collected and T is
        // destructed or traced, so stale slots are never misinterpreted.
        void clearUnusedSlots(T* from, T* to)
        {
            VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T> >::value), T>::clear(from, to);
        }

    protected:
        VectorBufferBase()
            : m_buffer(0)
            , m_capacity(0)
        {
        }

        VectorBufferBase(T* buffer, size_t capacity)
            : m_buffer(buffer)
            , m_capacity(capacity)
        {
        }

        T* m_buffer;
        unsigned m_capacity;
        // NOTE(review): intentionally not initialized by these constructors;
        // the deriving Vector sets m_size in every one of its constructors.
        unsigned m_size;
    };
330
331    template<typename T, size_t inlineCapacity, typename Allocator = DefaultAllocator>
332    class VectorBuffer;
333
    // VectorBuffer specialization for inlineCapacity == 0: the backing is
    // always heap-allocated, or null while the vector is empty.
    template<typename T, typename Allocator>
    class VectorBuffer<T, 0, Allocator> : protected VectorBufferBase<T, Allocator> {
    private:
        typedef VectorBufferBase<T, Allocator> Base;
    public:
        VectorBuffer()
        {
        }

        VectorBuffer(size_t capacity)
        {
            // Calling malloc(0) might take a lock and may actually do an
            // allocation on some systems.
            if (capacity)
                allocateBuffer(capacity);
        }

        void destruct()
        {
            deallocateBuffer(m_buffer);
            m_buffer = 0;
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            Allocator::backingFree(bufferToDeallocate);
        }

        // Forget the buffer without freeing it (used when ownership has been
        // transferred elsewhere).
        void resetBufferPointer()
        {
            m_buffer = 0;
            m_capacity = 0;
        }

        // O(1) swap: with no inline storage, exchanging pointers suffices.
        void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other)
        {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }

        using Base::allocateBuffer;
        using Base::allocationSize;

        using Base::buffer;
        using Base::capacity;

        using Base::clearUnusedSlots;

        bool hasOutOfLineBuffer() const
        {
            // When inlineCapacity is 0 we have an out of line buffer if we have a buffer.
            return buffer();
        }

    protected:
        using Base::m_size;

    private:
        using Base::m_buffer;
        using Base::m_capacity;
    };
395
    // VectorBuffer with non-zero inlineCapacity: small vectors live in the
    // embedded m_inlineBuffer; larger ones spill to a heap allocation.
    template<typename T, size_t inlineCapacity, typename Allocator>
    class VectorBuffer : protected VectorBufferBase<T, Allocator> {
        WTF_MAKE_NONCOPYABLE(VectorBuffer);
    private:
        typedef VectorBufferBase<T, Allocator> Base;
    public:
        VectorBuffer()
            : Base(inlineBuffer(), inlineCapacity)
        {
        }

        VectorBuffer(size_t capacity)
            : Base(inlineBuffer(), inlineCapacity)
        {
            // The inline storage already covers capacities up to
            // inlineCapacity; only larger requests allocate.
            if (capacity > inlineCapacity)
                Base::allocateBuffer(capacity);
        }

        void destruct()
        {
            deallocateBuffer(m_buffer);
            m_buffer = 0;
        }

        // Kept out of line so the common inline-buffer path stays small.
        NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate)
        {
            Allocator::backingFree(bufferToDeallocate);
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            // The inline buffer is part of this object and must never be freed.
            if (UNLIKELY(bufferToDeallocate != inlineBuffer()))
                reallyDeallocateBuffer(bufferToDeallocate);
        }

        void resetBufferPointer()
        {
            m_buffer = inlineBuffer();
            m_capacity = inlineCapacity;
        }

        void allocateBuffer(size_t newCapacity)
        {
            // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
            if (newCapacity > inlineCapacity)
                Base::allocateBuffer(newCapacity);
            else
                resetBufferPointer();
        }

        size_t allocationSize(size_t capacity) const
        {
            if (capacity <= inlineCapacity)
                return m_inlineBufferSize;
            return Base::allocationSize(capacity);
        }

        // Swaps the two buffers, moving elements as needed. Four cases,
        // depending on which sides currently use their inline storage.
        void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other)
        {
            typedef VectorTypeOperations<T> TypeOperations;

            if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
                // Both inline: element-wise swap of the common prefix, then
                // move the longer side's tail across.
                ASSERT(m_capacity == other.m_capacity);
                if (m_size > other.m_size) {
                    TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_size, other.inlineBuffer());
                    TypeOperations::move(inlineBuffer() + other.m_size, inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
                } else {
                    TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
                    TypeOperations::move(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size, inlineBuffer() + m_size);
                }
            } else if (buffer() == inlineBuffer()) {
                // We are inline, other is out of line: hand over the heap
                // buffer and move our inline elements into other's storage.
                m_buffer = other.m_buffer;
                other.m_buffer = other.inlineBuffer();
                TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
                std::swap(m_capacity, other.m_capacity);
            } else if (other.buffer() == other.inlineBuffer()) {
                // Mirror image of the case above.
                other.m_buffer = m_buffer;
                m_buffer = inlineBuffer();
                TypeOperations::move(other.inlineBuffer(), other.inlineBuffer() + other.m_size, inlineBuffer());
                std::swap(m_capacity, other.m_capacity);
            } else {
                // Both out of line: plain pointer swap.
                std::swap(m_buffer, other.m_buffer);
                std::swap(m_capacity, other.m_capacity);
            }
        }

        using Base::buffer;
        using Base::capacity;

        bool hasOutOfLineBuffer() const
        {
            return buffer() && buffer() != inlineBuffer();
        }

    protected:
        using Base::m_size;

    private:
        using Base::m_buffer;
        using Base::m_capacity;

        static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
        T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
        const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }

        AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
        template<typename U, size_t inlineBuffer, typename V>
        friend class Deque;
    };
505
506    template<typename T, size_t inlineCapacity, typename Allocator>
507    class Vector;
508
    // VectorDestructorBase defines the destructor of a vector. This base is used in order to
    // completely avoid creating a destructor for a vector that does not need to be destructed.
    // By doing so, the clang compiler will have correct information about whether or not a
    // vector has a trivial destructor and we use that in a compiler plugin to ensure the
    // correctness of non-finalized garbage-collected classes and the use of VectorTraits::needsDestruction.

    // All non-GC managed vectors need a destructor. This destructor will simply call finalize on the actual vector type.
    template<typename Derived, typename Elements, bool hasInlineCapacity, bool isGarbageCollected>
    class VectorDestructorBase {
    public:
        ~VectorDestructorBase() { static_cast<Derived*>(this)->finalize(); }
    };

    // Heap-allocated vectors with no inlineCapacity never need a destructor.
    template<typename Derived, typename Elements>
    class VectorDestructorBase<Derived, Elements, false, true> { };

    // Heap-allocated vectors with inlineCapacity need a destructor if the inline elements do.
    // The use of VectorTraits<Elements>::needsDestruction is delayed until we know that
    // inlineCapacity is non-zero to allow classes that recursively refer to themselves in vector
    // members. If inlineCapacity is non-zero doing so would have undefined meaning, so in this
    // case we can use HeapVectorWithInlineCapacityDestructorBase to define a destructor
    // depending on the value of VectorTraits<Elements>::needsDestruction.
    template<typename Derived, bool elementsNeedsDestruction>
    class HeapVectorWithInlineCapacityDestructorBase;

    template<typename Derived>
    class HeapVectorWithInlineCapacityDestructorBase<Derived, true> {
    public:
        ~HeapVectorWithInlineCapacityDestructorBase() { static_cast<Derived*>(this)->finalize(); }
    };

    template<typename Derived>
    class HeapVectorWithInlineCapacityDestructorBase<Derived, false> { };

    template<typename Derived, typename Elements>
    class VectorDestructorBase<Derived, Elements, true, true> : public HeapVectorWithInlineCapacityDestructorBase<Derived, VectorTraits<Elements>::needsDestruction> { };
546
    // Vector: WTF's growable array. Up to |inlineCapacity| elements are
    // stored inside the object itself; beyond that, a backing buffer is
    // obtained through |Allocator| (the default heap allocator or a GC heap).
    // The destructor is supplied by VectorDestructorBase so that vectors that
    // need no destruction stay trivially destructible.
    template<typename T, size_t inlineCapacity = 0, typename Allocator = DefaultAllocator>
    class Vector : private VectorBuffer<T, inlineCapacity, Allocator>, public VectorDestructorBase<Vector<T, inlineCapacity, Allocator>, T, (inlineCapacity > 0), Allocator::isGarbageCollected> {
        WTF_USE_ALLOCATOR(Vector, Allocator);
    private:
        typedef VectorBuffer<T, inlineCapacity, Allocator> Base;
        typedef VectorTypeOperations<T> TypeOperations;

    public:
        typedef T ValueType;

        typedef T* iterator;
        typedef const T* const_iterator;
        typedef std::reverse_iterator<iterator> reverse_iterator;
        typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

        Vector()
        {
            // Unused slots are initialized to zero so that the visitor and the
            // finalizer can visit them safely. canInitializeWithMemset tells us
            // that the class does not expect matching constructor and
            // destructor calls as long as the memory is zeroed.
            COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
            COMPILE_ASSERT(!WTF::IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, CantInitializeWithMemsetIfThereIsAVtable);
            m_size = 0;
        }

        // Constructs |size| default-initialized elements.
        explicit Vector(size_t size)
            : Base(size)
        {
            // Unused slots are initialized to zero so that the visitor and the
            // finalizer can visit them safely. canInitializeWithMemset tells us
            // that the class does not expect matching constructor and
            // destructor calls as long as the memory is zeroed.
            COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
            m_size = size;
            TypeOperations::initialize(begin(), end());
        }

        // Off-GC-heap vectors: Destructor should be called.
        // On-GC-heap vectors: Destructor should be called for inline buffers
        // (if any) but destructor shouldn't be called for vector backing since
        // it is managed by the traced GC heap.
        void finalize()
        {
            if (!inlineCapacity) {
                if (LIKELY(!Base::buffer()))
                    return;
            }
            if (LIKELY(m_size) && !(Allocator::isGarbageCollected && this->hasOutOfLineBuffer())) {
                TypeOperations::destruct(begin(), end());
                m_size = 0; // Partial protection against use-after-free.
            }

            Base::destruct();
        }

        void finalizeGarbageCollectedObject()
        {
            finalize();
        }

        Vector(const Vector&);
        template<size_t otherCapacity>
        explicit Vector(const Vector<T, otherCapacity, Allocator>&);

        Vector& operator=(const Vector&);
        template<size_t otherCapacity>
        Vector& operator=(const Vector<T, otherCapacity, Allocator>&);

#if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
        Vector(Vector&&);
        Vector& operator=(Vector&&);
#endif

        size_t size() const { return m_size; }
        size_t capacity() const { return Base::capacity(); }
        bool isEmpty() const { return !size(); }

        // Bounds-checked element access; out-of-range indices crash safely
        // (RELEASE_ASSERT) rather than reading out of bounds.
        T& at(size_t i)
        {
            RELEASE_ASSERT(i < size());
            return Base::buffer()[i];
        }
        const T& at(size_t i) const
        {
            RELEASE_ASSERT(i < size());
            return Base::buffer()[i];
        }

        // operator[] is bounds-checked too; it forwards to at().
        T& operator[](size_t i) { return at(i); }
        const T& operator[](size_t i) const { return at(i); }

        T* data() { return Base::buffer(); }
        const T* data() const { return Base::buffer(); }

        iterator begin() { return data(); }
        iterator end() { return begin() + m_size; }
        const_iterator begin() const { return data(); }
        const_iterator end() const { return begin() + m_size; }

        reverse_iterator rbegin() { return reverse_iterator(end()); }
        reverse_iterator rend() { return reverse_iterator(begin()); }
        const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
        const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }

        T& first() { return at(0); }
        const T& first() const { return at(0); }
        T& last() { return at(size() - 1); }
        const T& last() const { return at(size() - 1); }

        template<typename U> bool contains(const U&) const;
        template<typename U> size_t find(const U&) const;
        template<typename U> size_t reverseFind(const U&) const;

        void shrink(size_t size);
        void grow(size_t size);
        void resize(size_t size);
        void reserveCapacity(size_t newCapacity);
        void reserveInitialCapacity(size_t initialCapacity);
        void shrinkToFit() { shrinkCapacity(size()); }
        // Releases excess capacity only when less than half the buffer is in
        // use, keeping some slack (size/4 + 1) for future growth.
        void shrinkToReasonableCapacity()
        {
            if (size() * 2 < capacity())
                shrinkCapacity(size() + size() / 4 + 1);
        }

        // Destroys all elements and releases the backing store.
        void clear() { shrinkCapacity(0); }

        template<typename U> void append(const U*, size_t);
        template<typename U> void append(const U&);
        template<typename U> void uncheckedAppend(const U& val);
        template<typename U, size_t otherCapacity, typename V> void appendVector(const Vector<U, otherCapacity, V>&);

        template<typename U> void insert(size_t position, const U*, size_t);
        template<typename U> void insert(size_t position, const U&);
        template<typename U, size_t c, typename V> void insert(size_t position, const Vector<U, c, V>&);

        template<typename U> void prepend(const U*, size_t);
        template<typename U> void prepend(const U&);
        template<typename U, size_t c, typename V> void prepend(const Vector<U, c, V>&);

        void remove(size_t position);
        void remove(size_t position, size_t length);

        void removeLast()
        {
            ASSERT(!isEmpty());
            shrink(size() - 1);
        }

        // Constructs |size| copies of |val|.
        Vector(size_t size, const T& val)
            : Base(size)
        {
            m_size = size;
            TypeOperations::uninitializedFill(begin(), end(), val);
        }

        void fill(const T&, size_t);
        void fill(const T& val) { fill(val, size()); }

        template<typename Iterator> void appendRange(Iterator start, Iterator end);

        // Swaps contents with |other|; cheap unless both vectors are using
        // their inline storage (see VectorBuffer::swapVectorBuffer).
        void swap(Vector& other)
        {
            Base::swapVectorBuffer(other);
            std::swap(m_size, other.m_size);
        }

        void reverse();

        void trace(typename Allocator::Visitor*);

    private:
        void expandCapacity(size_t newMinCapacity);
        const T* expandCapacity(size_t newMinCapacity, const T*);
        template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
        void shrinkCapacity(size_t newCapacity);
        template<typename U> void appendSlowCase(const U&);

        using Base::m_size;
        using Base::buffer;
        using Base::capacity;
        using Base::swapVectorBuffer;
        using Base::allocateBuffer;
        using Base::allocationSize;
        using Base::clearUnusedSlots;
    };
734
    // Copy constructor: allocates to the source's capacity, then
    // copy-constructs each element into the uninitialized buffer.
    template<typename T, size_t inlineCapacity, typename Allocator>
    Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other)
        : Base(other.capacity())
    {
        m_size = other.size();
        TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }
742
    // Converting copy constructor from a Vector with a different inline
    // capacity; element type and allocator must match. Marked explicit at the
    // declaration site.
    template<typename T, size_t inlineCapacity, typename Allocator>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity, Allocator>::Vector(const Vector<T, otherCapacity, Allocator>& other)
        : Base(other.capacity())
    {
        m_size = other.size();
        TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }
751
    // Copy assignment. Reuses the existing buffer when possible: slots that
    // already hold live elements are overwritten with std::copy, and the
    // remainder is copy-constructed into uninitialized storage.
    template<typename T, size_t inlineCapacity, typename Allocator>
    Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, inlineCapacity, Allocator>& other)
    {
        if (UNLIKELY(&other == this))
            return *this;

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            // Not enough room: drop everything and reallocate so no element
            // has to be moved into the new buffer.
            clear();
            reserveCapacity(other.size());
            ASSERT(begin());
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }
772
773    inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }
774
    // Copy assignment from a Vector with a different inline capacity; same
    // strategy as the same-capacity overload above.
    template<typename T, size_t inlineCapacity, typename Allocator>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, otherCapacity, Allocator>& other)
    {
        // If the inline capacities match, we should call the more specific
        // template. If the inline capacities don't match, the two objects
        // shouldn't be allocated at the same address.
        ASSERT(!typelessPointersAreEqual(&other, this));

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
            ASSERT(begin());
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }
798
#if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
    template<typename T, size_t inlineCapacity, typename Allocator>
    Vector<T, inlineCapacity, Allocator>::Vector(Vector<T, inlineCapacity, Allocator>&& other)
    {
        m_size = 0;
        // It's a little weird to implement a move constructor using swap but this way we
        // don't have to add a move constructor to VectorBuffer.
        swap(other);
    }

    // Move assignment also swaps: our previous contents end up in |other| and
    // are released whenever it is destroyed. Self-move is a harmless no-op.
    template<typename T, size_t inlineCapacity, typename Allocator>
    Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(Vector<T, inlineCapacity, Allocator>&& other)
    {
        swap(other);
        return *this;
    }
#endif
816
817    template<typename T, size_t inlineCapacity, typename Allocator>
818    template<typename U>
819    bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const
820    {
821        return find(value) != kNotFound;
822    }
823
824    template<typename T, size_t inlineCapacity, typename Allocator>
825    template<typename U>
826    size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const
827    {
828        const T* b = begin();
829        const T* e = end();
830        for (const T* iter = b; iter < e; ++iter) {
831            if (*iter == value)
832                return iter - b;
833        }
834        return kNotFound;
835    }
836
837    template<typename T, size_t inlineCapacity, typename Allocator>
838    template<typename U>
839    size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const
840    {
841        const T* b = begin();
842        const T* iter = end();
843        while (iter > b) {
844            --iter;
845            if (*iter == value)
846                return iter - b;
847        }
848        return kNotFound;
849    }
850
    // Sets the vector to |newSize| copies of |val|. Existing elements are
    // overwritten in place; growth constructs the tail into fresh slots.
    template<typename T, size_t inlineCapacity, typename Allocator>
    void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize)
    {
        if (size() > newSize)
            shrink(newSize);
        else if (newSize > capacity()) {
            // Not enough room: drop everything and reallocate.
            clear();
            reserveCapacity(newSize);
            ASSERT(begin());
        }

        std::fill(begin(), end(), val);
        TypeOperations::uninitializedFill(end(), begin() + newSize, val);
        m_size = newSize;
    }
866
867    template<typename T, size_t inlineCapacity, typename Allocator>
868    template<typename Iterator>
869    void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, Iterator end)
870    {
871        for (Iterator it = start; it != end; ++it)
872            append(*it);
873    }
874
    // Grows the backing capacity to at least |newMinCapacity|, applying a
    // multiplicative growth factor so repeated appends have amortized constant cost.
    template<typename T, size_t inlineCapacity, typename Allocator>
    void Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity)
    {
        size_t oldCapacity = capacity();
        size_t expandedCapacity = oldCapacity;
        // We use a more aggressive expansion strategy for Vectors with inline storage.
        // This is because they are more likely to be on the stack, so the risk of heap bloat is minimized.
        // Furthermore, exceeding the inline capacity limit is not supposed to happen in the common case and may indicate a pathological condition or microbenchmark.
        if (inlineCapacity) {
            expandedCapacity *= 2;
            // Check for integer overflow, which could happen in the 32-bit build.
            RELEASE_ASSERT(expandedCapacity > oldCapacity);
        } else {
            // This cannot integer overflow.
            // On 64-bit, the "expanded" integer is 32-bit, and any encroachment above 2^32 will fail allocation in allocateBuffer().
            // On 32-bit, there's not enough address space to hold the old and new buffers.
            // In addition, our underlying allocator is supposed to always fail on > (2^31 - 1) allocations.
            expandedCapacity += (expandedCapacity / 4) + 1;
        }
        // Never go below kInitialVectorSize, and always satisfy the caller's minimum.
        reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kInitialVectorSize), expandedCapacity)));
    }
896
897    template<typename T, size_t inlineCapacity, typename Allocator>
898    const T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, const T* ptr)
899    {
900        if (ptr < begin() || ptr >= end()) {
901            expandCapacity(newMinCapacity);
902            return ptr;
903        }
904        size_t index = ptr - begin();
905        expandCapacity(newMinCapacity);
906        return begin() + index;
907    }
908
    // Overload for a pointer of a different element type U: such a pointer cannot
    // alias this vector's T buffer, so no re-aiming is required.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    inline U* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, U* ptr)
    {
        expandCapacity(newMinCapacity);
        return ptr;
    }
915
916    template<typename T, size_t inlineCapacity, typename Allocator>
917    inline void Vector<T, inlineCapacity, Allocator>::resize(size_t size)
918    {
919        if (size <= m_size)
920            TypeOperations::destruct(begin() + size, end());
921        else {
922            if (size > capacity())
923                expandCapacity(size);
924            TypeOperations::initialize(end(), begin() + size);
925        }
926
927        m_size = size;
928    }
929
    // Reduces the size to |size|, destroying the removed elements. Capacity is left
    // unchanged; clearUnusedSlots scrubs the vacated storage.
    template<typename T, size_t inlineCapacity, typename Allocator>
    void Vector<T, inlineCapacity, Allocator>::shrink(size_t size)
    {
        ASSERT(size <= m_size);
        TypeOperations::destruct(begin() + size, end());
        clearUnusedSlots(begin() + size, end());
        m_size = size;
    }
938
    // Increases the size to |size|, default-initializing the new tail elements and
    // expanding the backing capacity if required.
    template<typename T, size_t inlineCapacity, typename Allocator>
    void Vector<T, inlineCapacity, Allocator>::grow(size_t size)
    {
        ASSERT(size >= m_size);
        if (size > capacity())
            expandCapacity(size);
        TypeOperations::initialize(end(), begin() + size);
        m_size = size;
    }
948
    // Ensures capacity() >= |newCapacity|, moving existing elements into the newly
    // allocated buffer. A request at or below the current capacity is a no-op.
    template<typename T, size_t inlineCapacity, typename Allocator>
    void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity)
    {
        if (UNLIKELY(newCapacity <= capacity()))
            return;
        // Capture the old bounds before allocateBuffer repoints begin()/end().
        T* oldBuffer = begin();
        T* oldEnd = end();
        Base::allocateBuffer(newCapacity);
        TypeOperations::move(oldBuffer, oldEnd, begin());
        Base::deallocateBuffer(oldBuffer);
    }
960
    // Fast-path reservation for a freshly constructed, still-empty vector: skips the
    // element-move logic of reserveCapacity because there is nothing to move yet.
    template<typename T, size_t inlineCapacity, typename Allocator>
    inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(size_t initialCapacity)
    {
        ASSERT(!m_size);
        ASSERT(capacity() == inlineCapacity);
        if (initialCapacity > inlineCapacity)
            Base::allocateBuffer(initialCapacity);
    }
969
    // Shrinks the backing store toward |newCapacity|, trimming the size first if it
    // exceeds the new capacity. With newCapacity == 0 the buffer pointer is reset
    // (back to the inline buffer or null — see Base::resetBufferPointer).
    template<typename T, size_t inlineCapacity, typename Allocator>
    void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity)
    {
        if (newCapacity >= capacity())
            return;

        if (newCapacity < size())
            shrink(newCapacity);

        T* oldBuffer = begin();
        if (newCapacity > 0) {
            // Optimization: if we're downsizing inside the same allocator bucket, we can exit with no additional work.
            if (Base::allocationSize(capacity()) == Base::allocationSize(newCapacity))
                return;

            T* oldEnd = end();
            Base::allocateBuffer(newCapacity);
            // Only move elements if allocateBuffer actually produced a different buffer.
            if (begin() != oldBuffer)
                TypeOperations::move(oldBuffer, oldEnd, begin());
        } else {
            Base::resetBufferPointer();
        }

        Base::deallocateBuffer(oldBuffer);
    }
995
996    // Templatizing these is better than just letting the conversion happen implicitly,
997    // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
998    // without refcount thrash.
999
    // Appends |dataSize| elements copied from |data|. When U == T, |data| may point
    // into this vector itself: the expandCapacity(newSize, data) overload re-aims it
    // across a reallocation.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    void Vector<T, inlineCapacity, Allocator>::append(const U* data, size_t dataSize)
    {
        ASSERT(Allocator::isAllocationAllowed());
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            data = expandCapacity(newSize, data);
            ASSERT(begin());
        }
        // Guards against wrap-around of m_size + dataSize above.
        RELEASE_ASSERT(newSize >= m_size);
        T* dest = end();
        VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], dest);
        m_size = newSize;
    }
1014
1015    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1016    ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(const U& val)
1017    {
1018        ASSERT(Allocator::isAllocationAllowed());
1019        if (LIKELY(size() != capacity())) {
1020            new (NotNull, end()) T(val);
1021            ++m_size;
1022            return;
1023        }
1024
1025        appendSlowCase(val);
1026    }
1027
    // Out-of-line slow path for append(): grows the buffer, then constructs the new
    // element. NEVER_INLINE keeps this cold path out of append's inlined fast path.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(const U& val)
    {
        ASSERT(size() == capacity());

        // When U == T, |val| may live inside this vector's buffer; the pointer
        // overload of expandCapacity re-aims it so the copy below stays valid.
        const U* ptr = &val;
        ptr = expandCapacity(size() + 1, ptr);
        ASSERT(begin());

        new (NotNull, end()) T(*ptr);
        ++m_size;
    }
1040
1041    // This version of append saves a branch in the case where you know that the
1042    // vector's capacity is large enough for the append to succeed.
1043
1044    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1045    ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(const U& val)
1046    {
1047        ASSERT(size() < capacity());
1048        const U* ptr = &val;
1049        new (NotNull, end()) T(*ptr);
1050        ++m_size;
1051    }
1052
    // Appends every element of |val|, which may use a different element type,
    // inline capacity, and allocator; delegates to the pointer/length append.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t otherCapacity, typename OtherAllocator>
    inline void Vector<T, inlineCapacity, Allocator>::appendVector(const Vector<U, otherCapacity, OtherAllocator>& val)
    {
        append(val.begin(), val.size());
    }
1058
    // Inserts |dataSize| elements from |data| before index |position| (must be
    // <= size()), shifting the tail up to make room.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U* data, size_t dataSize)
    {
        ASSERT(Allocator::isAllocationAllowed());
        RELEASE_ASSERT(position <= size());
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            // When U == T, |data| may point into this vector; expandCapacity re-aims it.
            data = expandCapacity(newSize, data);
            ASSERT(begin());
        }
        // Guards against wrap-around of m_size + dataSize above.
        RELEASE_ASSERT(newSize >= m_size);
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
        VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], spot);
        m_size = newSize;
    }
1075
    // Inserts a single element before index |position| (must be <= size()).
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U& val)
    {
        ASSERT(Allocator::isAllocationAllowed());
        RELEASE_ASSERT(position <= size());
        // When U == T, |val| may live inside this vector; expandCapacity re-aims
        // |data| across a reallocation (see the expandCapacity overloads).
        const U* data = &val;
        if (size() == capacity()) {
            data = expandCapacity(size() + 1, data);
            ASSERT(begin());
        }
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + 1);
        new (NotNull, spot) T(*data);
        ++m_size;
    }
1091
    // Inserts all elements of |val| before index |position|; delegates to the
    // pointer/length insert.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename OtherAllocator>
    inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const Vector<U, c, OtherAllocator>& val)
    {
        insert(position, val.begin(), val.size());
    }
1097
    // Inserts |dataSize| elements from |data| at the front of the vector.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, size_t dataSize)
    {
        insert(0, data, dataSize);
    }
1103
    // Inserts a single element at the front of the vector.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    inline void Vector<T, inlineCapacity, Allocator>::prepend(const U& val)
    {
        insert(0, val);
    }
1109
    // Inserts all elements of |val| at the front of the vector.
    template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename V>
    inline void Vector<T, inlineCapacity, Allocator>::prepend(const Vector<U, c, V>& val)
    {
        insert(0, val.begin(), val.size());
    }
1115
    // Removes the element at |position| (must be < size()), shifting subsequent
    // elements down one slot.
    template<typename T, size_t inlineCapacity, typename Allocator>
    inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position)
    {
        RELEASE_ASSERT(position < size());
        T* spot = begin() + position;
        spot->~T();
        TypeOperations::moveOverlapping(spot + 1, end(), spot);
        // Scrub the now-vacant last slot.
        clearUnusedSlots(end() - 1, end());
        --m_size;
    }
1126
    // Removes |length| elements starting at |position|, shifting the tail down.
    // NOTE(review): position + length could wrap for pathologically large |length|,
    // letting the RELEASE_ASSERT pass; the debug-only assert on |position| partially
    // covers this — confirm callers cannot supply untrusted lengths.
    template<typename T, size_t inlineCapacity, typename Allocator>
    inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, size_t length)
    {
        ASSERT_WITH_SECURITY_IMPLICATION(position <= size());
        RELEASE_ASSERT(position + length <= size());
        T* beginSpot = begin() + position;
        T* endSpot = beginSpot + length;
        TypeOperations::destruct(beginSpot, endSpot);
        TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
        // Scrub the |length| now-vacant slots at the tail.
        clearUnusedSlots(end() - length, end());
        m_size -= length;
    }
1139
1140    template<typename T, size_t inlineCapacity, typename Allocator>
1141    inline void Vector<T, inlineCapacity, Allocator>::reverse()
1142    {
1143        for (size_t i = 0; i < m_size / 2; ++i)
1144            std::swap(at(i), at(m_size - 1 - i));
1145    }
1146
1147    template<typename T, size_t inlineCapacity, typename Allocator>
1148    void deleteAllValues(const Vector<T, inlineCapacity, Allocator>& collection)
1149    {
1150        typedef typename Vector<T, inlineCapacity, Allocator>::const_iterator iterator;
1151        iterator end = collection.end();
1152        for (iterator it = collection.begin(); it != end; ++it)
1153            delete *it;
1154    }
1155
    // Free-function swap so that generic code (and ADL) can swap two Vectors via
    // the member swap.
    template<typename T, size_t inlineCapacity, typename Allocator>
    inline void swap(Vector<T, inlineCapacity, Allocator>& a, Vector<T, inlineCapacity, Allocator>& b)
    {
        a.swap(b);
    }
1161
1162    template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
1163    bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
1164    {
1165        if (a.size() != b.size())
1166            return false;
1167
1168        return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
1169    }
1170
    // Inequality in terms of operator== to keep the two comparisons consistent.
    template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
    inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
    {
        return !(a == b);
    }
1176
    // This is only called if the allocator is a HeapAllocator. It is used when
    // visiting during a tracing GC.
    template<typename T, size_t inlineCapacity, typename Allocator>
    void Vector<T, inlineCapacity, Allocator>::trace(typename Allocator::Visitor* visitor)
    {
        ASSERT(Allocator::isGarbageCollected); // Garbage collector must be enabled.
        const T* bufferBegin = buffer();
        const T* bufferEnd = buffer() + size();
        // Trace each live element, but only when the element type itself needs it.
        if (ShouldBeTraced<VectorTraits<T> >::value) {
            for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd; bufferEntry++)
                Allocator::template trace<T, VectorTraits<T> >(visitor, *const_cast<T*>(bufferEntry));
        }
        // Keep a heap-allocated backing store alive without re-tracing its contents
        // (the elements were handled above).
        if (this->hasOutOfLineBuffer())
            Allocator::markNoTracing(visitor, buffer());
    }
1192
#if !ENABLE(OILPAN)
    // Without Oilpan, off-heap Vectors are declared as never needing GC tracing.
    template<typename T, size_t N>
    struct NeedsTracing<Vector<T, N> > {
        static const bool value = false;
    };
#endif
1199
1200} // namespace WTF
1201
1202using WTF::Vector;
1203
1204#endif // WTF_Vector_h
1205