// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
#define EIGEN_DYNAMIC_SPARSEMATRIX_H

namespace Eigen {

/** \deprecated use a SparseMatrix in an uncompressed mode
  *
  * \class DynamicSparseMatrix
  *
  * \brief A sparse matrix class designed for matrix assembly purposes
  *
  * \param _Scalar the scalar type, i.e. the type of the coefficients
  *
  * Unlike SparseMatrix, this class provides a much higher degree of flexibility. In particular, it allows
  * random read/write accesses in O(log(rho*inner_size)), where \c rho is the probability that a coefficient is
  * nonzero and \c inner_size is the number of rows if the matrix is column-major and the number of columns
  * otherwise.
  *
  * Internally, the data are stored as a std::vector of compressed vectors. The performance of random writes
  * might decrease as the number of nonzeros per inner vector increases. In practice, we observed very good
  * performance up to about 100 nonzeros/vector, and it remains relatively good up to 500 nonzeros/vector.
  *
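  * A minimal assembly sketch (the sizes, coordinates and values below are arbitrary, illustrative choices):
  * \code
  * DynamicSparseMatrix<double> mat(1000,1000);
  * mat.reserve(4000);                    // optional: room for about 4 nonzeros per inner vector
  * mat.coeffRef(3,7)   += 1.0;           // creates the nonzero (3,7)
  * mat.coeffRef(3,7)   += 1.0;           // updates it, found by a binary search
  * mat.coeffRef(999,0)  = 2.0;           // insertions can be performed in any order
  * SparseMatrix<double> compressed(mat); // switch to compressed storage once assembly is done
  * \endcode
  *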
  * \see SparseMatrix
  */

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = OuterRandomAccessPattern
  };
};
} // end namespace internal

template<typename _Scalar, int _Options, typename _Index>
class DynamicSparseMatrix
  : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
    // FIXME: why are these operators already available???
    // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, +=)
    // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, -=)
    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    using Base::operator=;
    enum {
      Options = _Options
    };

  protected:

    typedef DynamicSparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0)> TransposedSparseMatrix;

    Index m_innerSize;
    std::vector<internal::CompressedStorage<Scalar,Index> > m_data;

  public:

    inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
    inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
    inline Index innerSize() const { return m_innerSize; }
    inline Index outerSize() const { return static_cast<Index>(m_data.size()); }
    inline Index innerNonZeros(Index j) const { return m_data[j].size(); }

    std::vector<internal::CompressedStorage<Scalar,Index> >& _data() { return m_data; }
    const std::vector<internal::CompressedStorage<Scalar,Index> >& _data() const { return m_data; }

    /** \returns the coefficient value at given position \a row, \a col.
      * This operation involves an O(log(nnz)) binary search, where \c nnz is the number
      * of nonzeros of the corresponding inner vector.
      */
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].at(inner);
    }

    /** \returns a reference to the coefficient value at given position \a row, \a col.
      * This operation involves an O(log(nnz)) binary search, where \c nnz is the number of nonzeros
      * of the corresponding inner vector. If the coefficient does not exist yet, then a sorted
      * insertion into a sequential buffer is performed.
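      *
      * For instance (the coordinates below are arbitrary):
      * \code
      * DynamicSparseMatrix<double> m(10,10);
      * m.coeffRef(3,7) = 2.0;   // not present yet: sorted insertion into the inner vector
      * m.coeffRef(3,7) += 1.0;  // already present: located by a binary search
      * \endcode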
      */
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].atWithInsertion(inner);
    }

    class InnerIterator;
    class ReverseInnerIterator;

    void setZero()
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].clear();
    }

    /** \returns the number of non zero coefficients */
    Index nonZeros() const
    {
      Index res = 0;
      for (Index j=0; j<outerSize(); ++j)
        res += static_cast<Index>(m_data[j].size());
      return res;
    }

    void reserve(Index reserveSize = 1000)
    {
      if (outerSize()>0)
      {
        Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),Index(4));
        for (Index j=0; j<outerSize(); ++j)
        {
          m_data[j].reserve(reserveSizePerVector);
        }
      }
    }

    /** Does nothing: provided for compatibility with SparseMatrix */
    inline void startVec(Index /*outer*/) {}

    /** \returns a reference to the non zero coefficient at position \a row, \a col assuming that:
      * - the nonzero does not already exist
      * - the new coefficient is the last one of the given inner vector.
      *
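      * A typical sequential assembly sketch (column-major storage, i.e. the default; loop bounds and
      * values are arbitrary). Inner indices must be appended in strictly increasing order within each
      * outer vector:
      * \code
      * DynamicSparseMatrix<double> m(4,4);
      * for (int j = 0; j < 4; ++j)        // for each outer vector (a column here)
      * {
      *   m.startVec(j);                   // no-op, kept for SparseMatrix compatibility
      *   for (int i = j; i < 4; ++i)      // inner (row) indices strictly increasing
      *     m.insertBack(i,j) = 1.0;
      * }
      * m.finalize();                      // no-op as well
      * \endcode
      *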
      * \sa insert, insertBackByOuterInner */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \sa insertBack */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
      eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
                && "wrong sorted insertion");
      m_data[outer].append(0, inner);
      return m_data[outer].value(m_data[outer].size()-1);
    }

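    /** \returns a reference to a newly inserted nonzero at position \a row, \a col,
      * assuming the coefficient does not already exist. Unlike insertBack(), the new entry may have
      * any inner coordinate: the trailing entries of the inner vector are shifted to keep it sorted,
      * which makes this more expensive than a strictly sorted insertion. */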
    inline Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index startId = 0;
      Index id = static_cast<Index>(m_data[outer].size()) - 1;
      m_data[outer].resize(id+2,1);

      while ( (id >= startId) && (m_data[outer].index(id) > inner) )
      {
        m_data[outer].index(id+1) = m_data[outer].index(id);
        m_data[outer].value(id+1) = m_data[outer].value(id);
        --id;
      }
      m_data[outer].index(id+1) = inner;
      m_data[outer].value(id+1) = 0;
      return m_data[outer].value(id+1);
    }

    /** Does nothing: provided for compatibility with SparseMatrix */
    inline void finalize() {}

    /** Suppresses all nonzeros which are much smaller than \a reference under the tolerance \a epsilon */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].prune(reference,epsilon);
    }

    /** Resize the matrix without preserving the data (the matrix is set to zero)
      */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      setZero();
      if (Index(m_data.size()) != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    void resizeAndKeepData(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      const Index innerSize = IsRowMajor ? cols : rows;
      if (m_innerSize>innerSize)
      {
        // remove all coefficients with innerCoord>=innerSize
        // TODO
        //std::cerr << "not implemented yet\n";
        exit(2);
      }
      if (m_data.size() != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    /** The class DynamicSparseMatrix is deprecated */
    EIGEN_DEPRECATED inline DynamicSparseMatrix()
      : m_innerSize(0), m_data(0)
    {
      eigen_assert(innerSize()==0 && outerSize()==0);
    }

    /** The class DynamicSparseMatrix is deprecated */
    EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
      : m_innerSize(0)
    {
      resize(rows, cols);
    }

    /** The class DynamicSparseMatrix is deprecated */
    template<typename OtherDerived>
    EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_innerSize(0)
    {
      Base::operator=(other.derived());
    }

    inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
      : Base(), m_innerSize(0)
    {
      *this = other.derived();
    }

    inline void swap(DynamicSparseMatrix& other)
    {
      //EIGEN_DBG_SPARSE(std::cout << "SparseMatrix:: swap\n");
      std::swap(m_innerSize, other.m_innerSize);
      //std::swap(m_outerSize, other.m_outerSize);
      m_data.swap(other.m_data);
    }

    inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        m_data = other.m_data;
      }
      return *this;
    }

    /** Destructor */
    inline ~DynamicSparseMatrix() {}

  public:

    /** \deprecated
      * Set the matrix to zero and reserve the memory for \a reserveSize nonzero coefficients. */
    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      reserve(reserveSize);
    }

    /** \deprecated use insert()
      * Inserts a nonzero coefficient at the given coordinates \a row, \a col and returns its reference, assuming that:
      *  1 - the coefficient does not exist yet
      *  2 - this is the coefficient with the greatest inner coordinate for the given outer coordinate.
      * In other words, assuming \c *this is column-major, there must not exist any nonzero coefficient with coordinates
      * \c i \c x \a col such that \c i >= \a row. Otherwise the matrix is invalid.
      *
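      * A sketch of this deprecated assembly pattern (column-major storage, i.e. the default; sizes and
      * values are arbitrary):
      * \code
      * DynamicSparseMatrix<double> m(3,3);
      * m.startFill(9);
      * for (int j = 0; j < 3; ++j)
      *   for (int i = 0; i < 3; ++i)
      *     m.fill(i,j) = i + j;   // rows are visited in increasing order within each column
      * m.endFill();
      * \endcode
      *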
      * \see fillrand(), coeffRef()
      */
    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return insertBackByOuterInner(outer,inner);
    }

    /** \deprecated use insert()
      * Like fill() but with random inner coordinates.
      * Compared to the generic coeffRef(), the only limitation is that we assume
      * the coefficient does not exist yet.
      */
    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }

    /** \deprecated use finalize()
      * Does nothing. Provided for compatibility with SparseMatrix. */
    EIGEN_DEPRECATED void endFill() {}

#   ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#     include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   endif
};

template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::InnerIterator : public SparseVector<Scalar,_Options,_Index>::InnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::InnerIterator Base;
  public:
    InnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};

template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator Base;
  public:
    ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};

} // end namespace Eigen

#endif // EIGEN_DYNAMIC_SPARSEMATRIX_H