blob: 0ffbc43d23e931d1d542b51d56dfd20a7775da03 [file] [log] [blame]
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
9
10#ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
11#define EIGEN_DYNAMIC_SPARSEMATRIX_H
12
13namespace Eigen {
14
/** \deprecated use a SparseMatrix in an uncompressed mode
  *
  * \class DynamicSparseMatrix
  *
  * \brief A sparse matrix class designed for matrix assembly purposes
  *
  * \param _Scalar the scalar type, i.e. the type of the coefficients
  *
  * Unlike SparseMatrix, this class provides a much higher degree of flexibility. In particular, it allows
  * random read/write accesses in log(rho*outer_size) where \c rho is the probability that a coefficient is
  * nonzero and outer_size is the number of columns if the matrix is column-major and the number of rows
  * otherwise.
  *
  * Internally, the data are stored as a std::vector of compressed vectors. The performance of random writes
  * might decrease as the number of nonzeros per inner-vector increases. In practice, we observed very good
  * performance up to about 100 nonzeros/vector, and the performance remains relatively good up to 500
  * nonzeros/vector.
  *
  * \see SparseMatrix
  */
34
35namespace internal {
Austin Schuh189376f2018-12-20 22:11:15 +110036template<typename _Scalar, int _Options, typename _StorageIndex>
37struct traits<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
Brian Silverman72890c22015-09-19 14:37:37 -040038{
39 typedef _Scalar Scalar;
Austin Schuh189376f2018-12-20 22:11:15 +110040 typedef _StorageIndex StorageIndex;
Brian Silverman72890c22015-09-19 14:37:37 -040041 typedef Sparse StorageKind;
42 typedef MatrixXpr XprKind;
43 enum {
44 RowsAtCompileTime = Dynamic,
45 ColsAtCompileTime = Dynamic,
46 MaxRowsAtCompileTime = Dynamic,
47 MaxColsAtCompileTime = Dynamic,
48 Flags = _Options | NestByRefBit | LvalueBit,
49 CoeffReadCost = NumTraits<Scalar>::ReadCost,
50 SupportedAccessPatterns = OuterRandomAccessPattern
51 };
52};
53}
54
Austin Schuh189376f2018-12-20 22:11:15 +110055template<typename _Scalar, int _Options, typename _StorageIndex>
Brian Silverman72890c22015-09-19 14:37:37 -040056 class DynamicSparseMatrix
Austin Schuh189376f2018-12-20 22:11:15 +110057 : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
Brian Silverman72890c22015-09-19 14:37:37 -040058{
Austin Schuh189376f2018-12-20 22:11:15 +110059 typedef SparseMatrixBase<DynamicSparseMatrix> Base;
60 using Base::convert_index;
Brian Silverman72890c22015-09-19 14:37:37 -040061 public:
62 EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
63 // FIXME: why are these operator already alvailable ???
64 // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, +=)
65 // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, -=)
66 typedef MappedSparseMatrix<Scalar,Flags> Map;
67 using Base::IsRowMajor;
68 using Base::operator=;
69 enum {
70 Options = _Options
71 };
72
73 protected:
74
Austin Schuh189376f2018-12-20 22:11:15 +110075 typedef DynamicSparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0), StorageIndex> TransposedSparseMatrix;
Brian Silverman72890c22015-09-19 14:37:37 -040076
77 Index m_innerSize;
Austin Schuh189376f2018-12-20 22:11:15 +110078 std::vector<internal::CompressedStorage<Scalar,StorageIndex> > m_data;
Brian Silverman72890c22015-09-19 14:37:37 -040079
80 public:
81
82 inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
83 inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
84 inline Index innerSize() const { return m_innerSize; }
Austin Schuh189376f2018-12-20 22:11:15 +110085 inline Index outerSize() const { return convert_index(m_data.size()); }
Brian Silverman72890c22015-09-19 14:37:37 -040086 inline Index innerNonZeros(Index j) const { return m_data[j].size(); }
87
Austin Schuh189376f2018-12-20 22:11:15 +110088 std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() { return m_data; }
89 const std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() const { return m_data; }
Brian Silverman72890c22015-09-19 14:37:37 -040090
91 /** \returns the coefficient value at given position \a row, \a col
92 * This operation involes a log(rho*outer_size) binary search.
93 */
94 inline Scalar coeff(Index row, Index col) const
95 {
96 const Index outer = IsRowMajor ? row : col;
97 const Index inner = IsRowMajor ? col : row;
98 return m_data[outer].at(inner);
99 }
100
101 /** \returns a reference to the coefficient value at given position \a row, \a col
102 * This operation involes a log(rho*outer_size) binary search. If the coefficient does not
103 * exist yet, then a sorted insertion into a sequential buffer is performed.
104 */
105 inline Scalar& coeffRef(Index row, Index col)
106 {
107 const Index outer = IsRowMajor ? row : col;
108 const Index inner = IsRowMajor ? col : row;
109 return m_data[outer].atWithInsertion(inner);
110 }
111
112 class InnerIterator;
113 class ReverseInnerIterator;
114
115 void setZero()
116 {
117 for (Index j=0; j<outerSize(); ++j)
118 m_data[j].clear();
119 }
120
121 /** \returns the number of non zero coefficients */
122 Index nonZeros() const
123 {
124 Index res = 0;
125 for (Index j=0; j<outerSize(); ++j)
Austin Schuh189376f2018-12-20 22:11:15 +1100126 res += m_data[j].size();
Brian Silverman72890c22015-09-19 14:37:37 -0400127 return res;
128 }
129
130
131
132 void reserve(Index reserveSize = 1000)
133 {
134 if (outerSize()>0)
135 {
136 Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),Index(4));
137 for (Index j=0; j<outerSize(); ++j)
138 {
139 m_data[j].reserve(reserveSizePerVector);
140 }
141 }
142 }
143
144 /** Does nothing: provided for compatibility with SparseMatrix */
145 inline void startVec(Index /*outer*/) {}
146
147 /** \returns a reference to the non zero coefficient at position \a row, \a col assuming that:
148 * - the nonzero does not already exist
149 * - the new coefficient is the last one of the given inner vector.
150 *
151 * \sa insert, insertBackByOuterInner */
152 inline Scalar& insertBack(Index row, Index col)
153 {
154 return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
155 }
156
157 /** \sa insertBack */
158 inline Scalar& insertBackByOuterInner(Index outer, Index inner)
159 {
160 eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
161 eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
162 && "wrong sorted insertion");
163 m_data[outer].append(0, inner);
164 return m_data[outer].value(m_data[outer].size()-1);
165 }
166
167 inline Scalar& insert(Index row, Index col)
168 {
169 const Index outer = IsRowMajor ? row : col;
170 const Index inner = IsRowMajor ? col : row;
171
172 Index startId = 0;
173 Index id = static_cast<Index>(m_data[outer].size()) - 1;
174 m_data[outer].resize(id+2,1);
175
176 while ( (id >= startId) && (m_data[outer].index(id) > inner) )
177 {
178 m_data[outer].index(id+1) = m_data[outer].index(id);
179 m_data[outer].value(id+1) = m_data[outer].value(id);
180 --id;
181 }
182 m_data[outer].index(id+1) = inner;
183 m_data[outer].value(id+1) = 0;
184 return m_data[outer].value(id+1);
185 }
186
187 /** Does nothing: provided for compatibility with SparseMatrix */
188 inline void finalize() {}
189
190 /** Suppress all nonzeros which are smaller than \a reference under the tolerence \a epsilon */
191 void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
192 {
193 for (Index j=0; j<outerSize(); ++j)
194 m_data[j].prune(reference,epsilon);
195 }
196
197 /** Resize the matrix without preserving the data (the matrix is set to zero)
198 */
199 void resize(Index rows, Index cols)
200 {
201 const Index outerSize = IsRowMajor ? rows : cols;
Austin Schuh189376f2018-12-20 22:11:15 +1100202 m_innerSize = convert_index(IsRowMajor ? cols : rows);
Brian Silverman72890c22015-09-19 14:37:37 -0400203 setZero();
204 if (Index(m_data.size()) != outerSize)
205 {
206 m_data.resize(outerSize);
207 }
208 }
209
210 void resizeAndKeepData(Index rows, Index cols)
211 {
212 const Index outerSize = IsRowMajor ? rows : cols;
213 const Index innerSize = IsRowMajor ? cols : rows;
214 if (m_innerSize>innerSize)
215 {
216 // remove all coefficients with innerCoord>=innerSize
217 // TODO
218 //std::cerr << "not implemented yet\n";
219 exit(2);
220 }
221 if (m_data.size() != outerSize)
222 {
223 m_data.resize(outerSize);
224 }
225 }
226
227 /** The class DynamicSparseMatrix is deprectaed */
228 EIGEN_DEPRECATED inline DynamicSparseMatrix()
229 : m_innerSize(0), m_data(0)
230 {
Austin Schuh189376f2018-12-20 22:11:15 +1100231 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
232 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
233 #endif
Brian Silverman72890c22015-09-19 14:37:37 -0400234 eigen_assert(innerSize()==0 && outerSize()==0);
235 }
236
237 /** The class DynamicSparseMatrix is deprectaed */
238 EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
239 : m_innerSize(0)
240 {
Austin Schuh189376f2018-12-20 22:11:15 +1100241 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
242 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
243 #endif
Brian Silverman72890c22015-09-19 14:37:37 -0400244 resize(rows, cols);
245 }
246
247 /** The class DynamicSparseMatrix is deprectaed */
248 template<typename OtherDerived>
249 EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
250 : m_innerSize(0)
251 {
Austin Schuh189376f2018-12-20 22:11:15 +1100252 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
253 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
254 #endif
255 Base::operator=(other.derived());
Brian Silverman72890c22015-09-19 14:37:37 -0400256 }
257
258 inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
259 : Base(), m_innerSize(0)
260 {
Austin Schuh189376f2018-12-20 22:11:15 +1100261 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
262 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
263 #endif
Brian Silverman72890c22015-09-19 14:37:37 -0400264 *this = other.derived();
265 }
266
267 inline void swap(DynamicSparseMatrix& other)
268 {
269 //EIGEN_DBG_SPARSE(std::cout << "SparseMatrix:: swap\n");
270 std::swap(m_innerSize, other.m_innerSize);
271 //std::swap(m_outerSize, other.m_outerSize);
272 m_data.swap(other.m_data);
273 }
274
275 inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
276 {
277 if (other.isRValue())
278 {
279 swap(other.const_cast_derived());
280 }
281 else
282 {
283 resize(other.rows(), other.cols());
284 m_data = other.m_data;
285 }
286 return *this;
287 }
288
289 /** Destructor */
290 inline ~DynamicSparseMatrix() {}
291
292 public:
293
294 /** \deprecated
295 * Set the matrix to zero and reserve the memory for \a reserveSize nonzero coefficients. */
296 EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
297 {
298 setZero();
299 reserve(reserveSize);
300 }
301
302 /** \deprecated use insert()
303 * inserts a nonzero coefficient at given coordinates \a row, \a col and returns its reference assuming that:
304 * 1 - the coefficient does not exist yet
305 * 2 - this the coefficient with greater inner coordinate for the given outer coordinate.
306 * In other words, assuming \c *this is column-major, then there must not exists any nonzero coefficient of coordinates
307 * \c i \c x \a col such that \c i >= \a row. Otherwise the matrix is invalid.
308 *
309 * \see fillrand(), coeffRef()
310 */
311 EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
312 {
313 const Index outer = IsRowMajor ? row : col;
314 const Index inner = IsRowMajor ? col : row;
315 return insertBack(outer,inner);
316 }
317
318 /** \deprecated use insert()
319 * Like fill() but with random inner coordinates.
320 * Compared to the generic coeffRef(), the unique limitation is that we assume
321 * the coefficient does not exist yet.
322 */
323 EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
324 {
325 return insert(row,col);
326 }
327
328 /** \deprecated use finalize()
329 * Does nothing. Provided for compatibility with SparseMatrix. */
330 EIGEN_DEPRECATED void endFill() {}
331
332# ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
333# include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
334# endif
335 };
336
// Forward iterator over the nonzeros of one outer vector. Each outer vector is
// stored as a compressed vector, so we simply reuse SparseVector's iterator and
// remember the outer index in order to report full (row,col) coordinates.
template<typename Scalar, int _Options, typename _StorageIndex>
class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::InnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator Base;
  public:
    InnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    // Base::index() is the inner coordinate; m_outer supplies the other one.
    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
    inline Index outer() const { return m_outer; }

  protected:
    const Index m_outer;
};
353
// Reverse counterpart of InnerIterator: iterates the nonzeros of one outer
// vector from last to first, again delegating to SparseVector's iterator.
template<typename Scalar, int _Options, typename _StorageIndex>
class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator Base;
  public:
    ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    // Base::index() is the inner coordinate; m_outer supplies the other one.
    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
    inline Index outer() const { return m_outer; }

  protected:
    const Index m_outer;
};
370
namespace internal {

// Evaluator glue for Eigen's expression-evaluation machinery: wraps a
// DynamicSparseMatrix by pointer and forwards coefficient access to it.
template<typename _Scalar, int _Options, typename _StorageIndex>
struct evaluator<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
  : evaluator_base<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
{
  typedef _Scalar Scalar;
  typedef DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> SparseMatrixType;
  typedef typename SparseMatrixType::InnerIterator InnerIterator;
  typedef typename SparseMatrixType::ReverseInnerIterator ReverseInnerIterator;

  enum {
    CoeffReadCost = NumTraits<_Scalar>::ReadCost,
    Flags = SparseMatrixType::Flags
  };

  // Default-constructed evaluators hold a null matrix pointer until bound.
  evaluator() : m_matrix(0) {}
  evaluator(const SparseMatrixType &mat) : m_matrix(&mat) {}

  // Implicit conversions back to the wrapped matrix (mutable and const).
  operator SparseMatrixType&() { return m_matrix->const_cast_derived(); }
  operator const SparseMatrixType&() const { return *m_matrix; }

  Scalar coeff(Index row, Index col) const { return m_matrix->coeff(row,col); }

  // For a plain stored matrix the nonzero count is exact, not an estimate.
  Index nonZerosEstimate() const { return m_matrix->nonZeros(); }

  const SparseMatrixType *m_matrix;
};

}
401
Brian Silverman72890c22015-09-19 14:37:37 -0400402} // end namespace Eigen
403
404#endif // EIGEN_DYNAMIC_SPARSEMATRIX_H