// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {

/** \internal
  * Stores a sparse set of values as a list of values and a list of
  * indices, kept sorted by increasing index.
  *
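  * A minimal usage sketch (illustrative):
  * \code
  * CompressedStorage<double,int> store;
  * store.append(0.5, 2);     // value 0.5 at index 2
  * store.append(1.5, 7);     // indices appended in increasing order
  * double a = store.at(7);   // 1.5
  * double b = store.at(3);   // 0 (not stored; returned, not inserted)
  * \endcode
  */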
template<typename _Scalar, typename _Index>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _Index Index;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    explicit CompressedStorage(size_t size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
      internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

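    /** Reserves room for \a size additional entries beyond the current size
      * (note that, unlike std::vector::reserve, the argument is not an
      * absolute capacity). */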
    void reserve(size_t size)
    {
      size_t newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

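    /** Resizes to \a size entries; if this requires a reallocation, an extra
      * fraction \a reserveSizeFactor of \a size is allocated as headroom. */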
    void resize(size_t size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
        reallocate(size + size_t(reserveSizeFactor*double(size)));
      m_size = size;
    }

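    /** Appends the value \a v at index \a i. The binary-search helpers below
      * assume indices are appended in increasing order; this is not checked. */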
    void append(const Scalar& v, Index i)
    {
      Index id = static_cast<Index>(m_size);
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = i;
    }

    inline size_t size() const { return m_size; }
    inline size_t allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    inline Scalar& value(size_t i) { return m_values[i]; }
    inline const Scalar& value(size_t i) const { return m_values[i]; }

    inline Index& index(size_t i) { return m_indices[i]; }
    inline const Index& index(size_t i) const { return m_indices[i]; }

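    /** Wraps the external buffers \a indices and \a values without copying
      * them. Beware that the returned object takes ownership: its destructor
      * will delete[] both buffers. */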
    static CompressedStorage Map(Index* indices, Scalar* values, size_t size)
    {
      CompressedStorage res;
      res.m_indices = indices;
      res.m_values = values;
      res.m_allocatedSize = res.m_size = size;
      return res;
    }

    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(size_t start, size_t end, Index key) const
    {
      while(end>start)
      {
        // overflow-safe midpoint (equivalent to (start+end)/2 without wrap-around)
        size_t mid = start + ((end-start)>>1);
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return static_cast<Index>(start);
    }
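    // Example: with stored indices {2,5,9}, searchLowerIndex(5) returns 1
    // and searchLowerIndex(6) returns 2, i.e. the usual lower-bound position
    // at which \c key could be inserted while keeping the indices sorted.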

    /** \returns the stored value at index \a key.
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^  optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }
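    // E.g., with stored pairs {(2,0.5),(7,1.5)}: at(7) returns 1.5, while
    // at(3) returns the default value and leaves the storage untouched.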

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(size_t start, size_t end, Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (key==m_indices[end-1])
        return m_values[end-1];
      // ^^  optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index \a key.
      * If the value does not exist, then the value \a defaultValue is inserted
      * such that the keys remain sorted.
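      * For instance (illustrative):
      * \code
      * // with stored indices {2,7}: shifts the entry at index 7 one slot
      * // to the right and inserts the new pair (5, 3.0)
      * store.atWithInsertion(5) = 3.0;
      * \endcode
      */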
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      size_t id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        resize(m_size+1,1);
        for (size_t j=m_size-1; j>id; --j)
        {
          m_indices[j] = m_indices[j-1];
          m_values[j] = m_values[j-1];
        }
        m_indices[id] = key;
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }

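    /** Removes, in place, all entries whose value is negligible with respect
      * to \a reference, as decided by internal::isMuchSmallerThan with
      * precision \a epsilon, and compacts the remaining entries. */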
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      size_t k = 0;
      size_t n = size();
      for (size_t i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }

  protected:

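    /** Reallocates both buffers to exactly \a size elements, copying over the
      * first min(\a size, \c m_size) entries. Note that if the second \c new
      * throws, the first buffer is leaked. */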
    inline void reallocate(size_t size)
    {
      Scalar* newValues  = new Scalar[size];
      Index* newIndices = new Index[size];
      size_t copySize = (std::min)(size, m_size);
      // copy the existing entries into the new buffers
      internal::smart_copy(m_values, m_values+copySize, newValues);
      internal::smart_copy(m_indices, m_indices+copySize, newIndices);
      // release the old buffers and switch to the new ones
      delete[] m_values;
      delete[] m_indices;
      m_values = newValues;
      m_indices = newIndices;
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    Index* m_indices;
    size_t m_size;
    size_t m_allocatedSize;

};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H