#ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
#define EIGEN_DYNAMIC_SPARSEMATRIX_H

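/** \class DynamicSparseMatrix
  *
  * \brief A sparse matrix class designed for matrix assembly purposes
  *
  * Unlike SparseMatrix, this class allows efficient random read/write access:
  * the data are stored as a std::vector of compressed inner vectors, so a
  * random insertion only has to shift the tail of its own inner vector.
  *
  * \param _Scalar  the scalar type, i.e. the type of the coefficients
  * \param _Options either RowMajor or ColMajor; the default is ColMajor
  * \param _Index   the type of the indices
  *
  * \see SparseMatrix
  */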
namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = OuterRandomAccessPattern
  };
};
} // namespace internal

template<typename _Scalar, int _Options, typename _Index>
class DynamicSparseMatrix
  : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)

    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    using Base::operator=;
    enum {
      Options = _Options
    };

  protected:

    typedef DynamicSparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0)> TransposedSparseMatrix;

    Index m_innerSize;
    std::vector<CompressedStorage<Scalar,Index> > m_data;

  public:

    inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
    inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
    inline Index innerSize() const { return m_innerSize; }
    inline Index outerSize() const { return static_cast<Index>(m_data.size()); }
    inline Index innerNonZeros(Index j) const { return m_data[j].size(); }

    std::vector<CompressedStorage<Scalar,Index> >& _data() { return m_data; }
    const std::vector<CompressedStorage<Scalar,Index> >& _data() const { return m_data; }

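    /** \returns the coefficient value at given position \a row, \a col
      * (zero if the coefficient does not exist). */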
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].at(inner);
    }

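    /** \returns a reference to the coefficient at given position \a row, \a col.
      * If the coefficient does not exist yet, it is inserted into the target
      * inner vector via a sorted insertion. */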
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].atWithInsertion(inner);
    }

    class InnerIterator;

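    /** Sets the matrix to zero while keeping the allocated memory */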
    void setZero()
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].clear();
    }

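    /** \returns the number of nonzero coefficients */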
    Index nonZeros() const
    {
      Index res = 0;
      for (Index j=0; j<outerSize(); ++j)
        res += static_cast<Index>(m_data[j].size());
      return res;
    }

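    /** Preallocates room for \a reserveSize nonzeros, evenly distributed over
      * the inner vectors (at least 4 entries are reserved per inner vector). */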
    void reserve(Index reserveSize = 1000)
    {
      if (outerSize()>0)
      {
        Index reserveSizePerVector = std::max(reserveSize/outerSize(),Index(4));
        for (Index j=0; j<outerSize(); ++j)
        {
          m_data[j].reserve(reserveSizePerVector);
        }
      }
    }

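    /** Does nothing: provided for compatibility with SparseMatrix */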
    inline void startVec(Index /*outer*/) {}

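    /** \returns a reference to the nonzero coefficient at position \a row, \a col,
      * assuming it does not already exist and that it is the last one of the target
      * inner vector in storage order. */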
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

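    /** \sa insertBack(Index,Index) */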
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
      eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
                   && "wrong sorted insertion");
      m_data[outer].append(0, inner);
      return m_data[outer].value(m_data[outer].size()-1);
    }

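    /** Inserts a nonzero at position \a row, \a col in any order and \returns a
      * reference to its (zero-initialized) value. Existing coefficients of the
      * target inner vector with a larger inner index are shifted one position to
      * the right, so this is the main cost of random insertions. */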
    inline Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index startId = 0;
      Index id = static_cast<Index>(m_data[outer].size()) - 1;
      m_data[outer].resize(id+2,1);

      while ( (id >= startId) && (m_data[outer].index(id) > inner) )
      {
        m_data[outer].index(id+1) = m_data[outer].index(id);
        m_data[outer].value(id+1) = m_data[outer].value(id);
        --id;
      }
      m_data[outer].index(id+1) = inner;
      m_data[outer].value(id+1) = 0;
      return m_data[outer].value(id+1);
    }

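    /** Does nothing: provided for compatibility with SparseMatrix */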
    inline void finalize() {}

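    /** Suppresses all nonzeros which are smaller than \a reference under the tolerance \a epsilon */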
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].prune(reference,epsilon);
    }

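    /** Resizes the matrix to \a rows x \a cols without preserving the data
      * (the matrix is set to zero) */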
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      setZero();
      if (Index(m_data.size()) != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

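    /** Resizes the matrix to \a rows x \a cols while keeping the existing
      * nonzeros. Shrinking the inner dimension is not implemented. */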
    void resizeAndKeepData(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      const Index innerSize = IsRowMajor ? cols : rows;
      if (m_innerSize>innerSize)
      {
        // TODO: shrinking the inner dimension would require removing all
        // coefficients with an inner index >= innerSize; not implemented yet.
        exit(2);
      }
      if (Index(m_data.size()) != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    inline DynamicSparseMatrix()
      : m_innerSize(0), m_data(0)
    {
      eigen_assert(innerSize()==0 && outerSize()==0);
    }

    inline DynamicSparseMatrix(Index rows, Index cols)
      : m_innerSize(0)
    {
      resize(rows, cols);
    }

    template<typename OtherDerived>
    explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_innerSize(0)
    {
      Base::operator=(other.derived());
    }

    inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
      : Base(), m_innerSize(0)
    {
      *this = other.derived();
    }

    inline void swap(DynamicSparseMatrix& other)
    {
      std::swap(m_innerSize, other.m_innerSize);
      m_data.swap(other.m_data);
    }

    inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        m_data = other.m_data;
      }
      return *this;
    }

    inline ~DynamicSparseMatrix() {}

  public:

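    /** \deprecated use setZero() and reserve() */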
    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      reserve(reserveSize);
    }

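    /** \deprecated use insertBack(Index,Index) */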
    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      // insertBack() expects (row,col) coordinates and would swap them again,
      // which is wrong in the column-major case, so call the outer/inner
      // overload directly.
      return insertBackByOuterInner(outer,inner);
    }

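    /** \deprecated use insert(Index,Index) */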
    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }

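    /** \deprecated use finalize(); does nothing */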
    EIGEN_DEPRECATED void endFill() {}

#   ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   endif
};

template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::InnerIterator : public SparseVector<Scalar,_Options>::InnerIterator
{
    typedef typename SparseVector<Scalar,_Options>::InnerIterator Base;
  public:
    InnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};

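// A minimal usage sketch (not part of the original header): it shows the two
// insertion paths of DynamicSparseMatrix, a random-order write via coeffRef()
// and a random-order insert(), followed by iteration over one inner vector.
// The matrix size and values are illustrative only.
//
//   #include <Eigen/Sparse>
//   #include <iostream>
//
//   int main()
//   {
//     Eigen::DynamicSparseMatrix<double> m(4,4);  // column-major by default
//     m.coeffRef(3,1) = 2.0;  // creates the nonzero (3,1)
//     m.insert(0,1) = 1.0;    // inserting before an existing nonzero is fine
//     for (Eigen::DynamicSparseMatrix<double>::InnerIterator it(m,1); it; ++it)
//       std::cout << '(' << it.row() << ',' << it.col() << ") = "
//                 << it.value() << '\n';
//     return 0;
//   }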
#endif // EIGEN_DYNAMIC_SPARSEMATRIX_H