#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};

} // end namespace internal

/** \class SparseMatrix
  *
  * \brief A sparse matrix in compressed column (or row) storage.
  *
  * The coefficients of each inner vector are stored contiguously and sorted
  * by increasing inner index; an array of outer indices gives the start
  * position of each inner vector within that storage.
  *
  * \param _Scalar the scalar type, i.e. the type of the coefficients
  * \param _Options a combination of bit flags; pass \c RowMajor for
  *        compressed row storage (the default is column major)
  * \param _Index the type of the indices
  */
template<typename _Scalar, int _Options, typename _Index>
class SparseMatrix
  : public SparseMatrixBase<SparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)

    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, -=)

    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    typedef CompressedStorage<Scalar,Index> Storage;
    enum {
      Options = _Options
    };

  protected:

    // same matrix type with the opposite storage order (flip RowMajorBit)
    typedef SparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?0:RowMajorBit)> TransposedSparseMatrix;

    Index m_outerSize;                      // number of inner vectors
    Index m_innerSize;                      // size of each inner vector
    Index* m_outerIndex;                    // m_outerSize+1 start positions into m_data
    CompressedStorage<Scalar,Index> m_data; // packed values and inner indices

  public:

    /** \returns the number of rows of the matrix */
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    /** \returns the number of columns of the matrix */
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }

    /** \returns the size of each inner vector (number of columns if row major, number of rows otherwise) */
    inline Index innerSize() const { return m_innerSize; }
    /** \returns the number of inner vectors (number of rows if row major, number of columns otherwise) */
    inline Index outerSize() const { return m_outerSize; }
    /** \returns the number of stored coefficients in the \a j -th inner vector */
    inline Index innerNonZeros(Index j) const { return m_outerIndex[j+1]-m_outerIndex[j]; }

    inline const Scalar* _valuePtr() const { return &m_data.value(0); }
    inline Scalar* _valuePtr() { return &m_data.value(0); }

    inline const Index* _innerIndexPtr() const { return &m_data.index(0); }
    inline Index* _innerIndexPtr() { return &m_data.index(0); }

    inline const Index* _outerIndexPtr() const { return m_outerIndex; }
    inline Index* _outerIndexPtr() { return m_outerIndex; }

    inline Storage& data() { return m_data; }
    inline const Storage& data() const { return m_data; }

    /** \returns the value at position \a row, \a col, or an implicit zero
      * if that coefficient is not stored */
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data.atInRange(m_outerIndex[outer], m_outerIndex[outer+1], inner);
    }

    /** \returns a non const reference to the coefficient at position \a row, \a col.
      * The coefficient must already be stored, otherwise an assertion is raised. */
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
      eigen_assert(end>start && "coeffRef cannot be called on a zero coefficient");
      const Index p = m_data.searchLowerIndex(start,end-1,inner);
      eigen_assert((p<end) && (m_data.index(p)==inner) && "coeffRef cannot be called on a zero coefficient");
      return m_data.value(p);
    }
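
    // A minimal usage sketch of the two accessors above, assuming a
    // hypothetical 3x3 column-major SparseMatrix<double> m:
    //
    //   SparseMatrix<double> m(3,3);
    //   m.startVec(0);
    //   m.insertBack(1,0) = 7.0;   // store one explicit coefficient
    //   m.finalize();
    //   double a = m.coeff(1,0);   // 7.0
    //   double b = m.coeff(2,2);   // 0.0: not stored, returned by value
    //   m.coeffRef(1,0) = 8.0;     // fine: (1,0) is stored
    //   // m.coeffRef(2,2) would assert, since (2,2) is not stored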

  public:

    class InnerIterator;

    /** Removes all non zeros */
    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
    }

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }

    /** Preallocates room for \a reserveSize non zeros */
    inline void reserve(Index reserveSize)
    {
      m_data.reserve(reserveSize);
    }

    /** Low level API: appends the coefficient at position \a row, \a col;
      * \sa insertBackByOuterInner */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** Low level API: appends a coefficient at the end of the inner vector \a outer;
      * \a inner must be strictly larger than the last stored inner index of that vector */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(size_t(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** Low level API: same as insertBackByOuterInner but without the ordering assertions */
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** Low level API: starts the inner vector \a outer; must be called once per
      * inner vector, in increasing order, before filling it with insertBack */
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==int(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }
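
    // Sketch of the low-level sequential filling API formed by reserve,
    // startVec, insertBack and finalize (column-major assumed; rows, cols,
    // nnzEstimate, i and v_ij are hypothetical placeholders):
    //
    //   SparseMatrix<double> m(rows,cols);
    //   m.reserve(nnzEstimate);       // optional preallocation
    //   for (Index j=0; j<cols; ++j)
    //   {
    //     m.startVec(j);              // outer vectors visited in order
    //     m.insertBack(i,j) = v_ij;   // rows i strictly increasing within column j
    //   }
    //   m.finalize();                 // seal the trailing outer indices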

    /** \returns a reference to a new non zero coefficient at position \a row,
      * \a col, initialized to zero. The coefficient must not already exist.
      *
      * \warning This is a rather expensive operation: inserting far from the
      * end of the storage shifts all subsequent coefficients. */
    EIGEN_DONT_INLINE Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index previousOuter = outer;
      if (m_outerIndex[outer+1]==0)
      {
        // we start a new inner vector: back-fill the start position of all
        // empty inner vectors preceding it
        while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
        {
          m_outerIndex[previousOuter] = static_cast<Index>(m_data.size());
          --previousOuter;
        }
        m_outerIndex[outer+1] = m_outerIndex[outer];
      }

      // the insertion appends to m_data only if the target inner vector is
      // the last non empty one; otherwise the tail has to be shifted below
      bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                    && (size_t(m_outerIndex[outer+1]) == m_data.size());

      size_t startId = m_outerIndex[outer];

      size_t p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];

      float reallocRatio = 1;
      if (m_data.allocatedSize()<=m_data.size())
      {
        // we need to reallocate the storage
        if (m_data.size()==0)
        {
          m_data.reserve(32);
        }
        else
        {
          // to limit the number of reallocations, over-allocate using the
          // final size estimated from the current filling ratio, with a
          // growth factor clamped to [1.5, 8]
          float nnzEstimate = float(m_outerIndex[outer])*float(m_outerSize)/float(outer+1);
          reallocRatio = (nnzEstimate-float(m_data.size()))/float(m_data.size());
          reallocRatio = std::min(std::max(reallocRatio,1.5f),8.f);
        }
      }
      m_data.resize(m_data.size()+1,reallocRatio);

      if (!isLastVec)
      {
        if (previousOuter==-1)
        {
          // every previously filled vector was empty: the new element must
          // become the very first stored entry, so rebuild the leading outer
          // indices and shift the whole data buffer by one
          for (Index k=0; k<=(outer+1); ++k)
            m_outerIndex[k] = 0;
          Index k=outer+1;
          while(m_outerIndex[k]==0)
            m_outerIndex[k++] = 1;
          while (k<=m_outerSize && m_outerIndex[k]!=0)
            m_outerIndex[k++]++;
          p = 0;
          --k;
          k = m_outerIndex[k]-1;
          while (k>0)
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
        else
        {
          // inserting into a vector that is not the last non empty one:
          // bump the outer indices of the subsequent vectors and shift
          // their data one position to the right
          Index j = outer+2;
          while (j<=m_outerSize && m_outerIndex[j]!=0)
            m_outerIndex[j++]++;
          --j;
          Index k = m_outerIndex[j]-1;
          while (k>=Index(p))
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
      }

      // finally, move the new element down to its sorted position within
      // its own inner vector
      while ( (p > startId) && (m_data.index(p-1) > inner) )
      {
        m_data.index(p) = m_data.index(p-1);
        m_data.value(p) = m_data.value(p-1);
        --p;
      }

      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }
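
    // Sketch of random-order filling via insert() (n, rowOf, colOf and
    // valueOf are hypothetical; each (row,col) pair must appear only once):
    //
    //   SparseMatrix<double> m(rows,cols);
    //   m.reserve(n);                 // avoids repeated reallocations
    //   for (int k=0; k<n; ++k)
    //     m.insert(rowOf[k], colOf[k]) = valueOf[k];
    //   m.finalize();                 // mandatory before using the matrix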

    /** Must be called after inserting a set of non zeros: it fills the
      * remaining outer indices so that every inner vector is properly
      * delimited. */
    inline void finalize()
    {
      Index size = static_cast<Index>(m_data.size());
      Index i = m_outerSize;
      // find the last filled inner vector
      while (i>=0 && m_outerIndex[i]==0)
        --i;
      ++i;
      while (i<=m_outerSize)
      {
        m_outerIndex[i] = size;
        ++i;
      }
    }

    /** Suppresses all non zeros which are much smaller than \a reference under the tolerance \a epsilon */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      prune(default_prunning_func(reference,epsilon));
    }

    /** Suppresses all non zeros which do not satisfy the predicate \a keep.
      * The functor type \a KeepFunc must implement:
      * \code bool operator() (const Index& row, const Index& col, const Scalar& value) const; \endcode
      */
    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      // compact the kept coefficients in place, then update the outer indices
      Index k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k,0);
    }
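
    // Sketch of both prune() overloads, assuming a SparseMatrix<double> m
    // with the default int index type (KeepUpper is a hypothetical functor):
    //
    //   m.prune(1.0);               // drop entries much smaller than 1.0
    //
    //   struct KeepUpper {
    //     bool operator()(const int& row, const int& col, const double&) const
    //     { return col >= row; }
    //   };
    //   m.prune(KeepUpper());       // keep only the upper triangular part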

    /** Resizes the matrix to \a rows x \a cols and initializes it to zero;
      * all stored coefficients are discarded */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        delete[] m_outerIndex;
        m_outerIndex = new Index [outerSize+1];
        m_outerSize = outerSize;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
    }

    /** Low level API: resizes the internal storage to hold \a size non zeros */
    void resizeNonZeros(Index size)
    {
      m_data.resize(size);
    }

    /** Default constructor yielding an empty \c 0 \c x \c 0 matrix */
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0)
    {
      resize(0, 0);
    }

    /** Constructs a \a rows \c x \a cols empty matrix */
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0)
    {
      resize(rows, cols);
    }

    /** Constructs a sparse matrix from the sparse expression \a other */
    template<typename OtherDerived>
    inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0)
    {
      *this = other.derived();
    }

    /** Copy constructor */
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0)
    {
      *this = other.derived();
    }

    /** Swaps the content of two sparse matrices of the same type:
      * a fast operation that only exchanges pointers and sizes */
    inline void swap(SparseMatrix& other)
    {
      std::swap(m_outerIndex, other.m_outerIndex);
      std::swap(m_innerSize, other.m_innerSize);
      std::swap(m_outerSize, other.m_outerSize);
      m_data.swap(other.m_data);
    }

    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
      {
        // the right hand side is a temporary: steal its storage
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        memcpy(m_outerIndex, other.m_outerIndex, (m_outerSize+1)*sizeof(Index));
        m_data = other.m_data;
      }
      return *this;
    }

#ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseMatrix& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    { return Base::operator=(product); }

    template<typename OtherDerived>
    inline SparseMatrix& operator=(const ReturnByValue<OtherDerived>& other)
    { return Base::operator=(other); }

    template<typename OtherDerived>
    inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
    { return Base::operator=(other); }
#endif

    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
      if (needToTranspose)
      {
        // the storage orders differ, so the assignment amounts to a
        // transposition done with a two passes algorithm:
        //  1 - count the number of coeffs per destination inner vector,
        //  2 - scatter the coefficients to their final position.
        // Since each coefficient of the rhs is evaluated twice, evaluate
        // it into a temporary if needed.
        typedef typename internal::nested<OtherDerived,2>::type OtherCopy;
        typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
        OtherCopy otherCopy(other.derived());

        resize(other.rows(), other.cols());
        Eigen::Map<Matrix<Index, Dynamic, 1> > (m_outerIndex,outerSize()).setZero();

        // pass 1: count the non zeros per destination inner vector
        for (Index j=0; j<otherCopy.outerSize(); ++j)
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
            ++m_outerIndex[it.index()];

        // prefix sum to turn the counts into start positions
        Index count = 0;
        VectorXi positions(outerSize());
        for (Index j=0; j<outerSize(); ++j)
        {
          Index tmp = m_outerIndex[j];
          m_outerIndex[j] = count;
          positions[j] = count;
          count += tmp;
        }
        m_outerIndex[outerSize()] = count;

        m_data.resize(count);

        // pass 2: scatter the coefficients
        for (Index j=0; j<otherCopy.outerSize(); ++j)
        {
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
          {
            Index pos = positions[it.index()]++;
            m_data.index(pos) = j;
            m_data.value(pos) = it.value();
          }
        }
        return *this;
      }
      else
      {
        // same storage order: no special optimization, defer to the base class
        return SparseMatrixBase<SparseMatrix>::operator=(other.derived());
      }
    }

    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      EIGEN_DBG_SPARSE(
        s << "Nonzero entries:\n";
        for (Index i=0; i<m.nonZeros(); ++i)
        {
          s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
        }
        s << std::endl;
        s << std::endl;
        s << "Outer pointers:\n";
        for (Index i=0; i<m.outerSize(); ++i)
        {
          s << m.m_outerIndex[i] << " ";
        }
        s << " $" << std::endl;
        s << std::endl;
      );
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }

    /** Destructor */
    inline ~SparseMatrix()
    {
      delete[] m_outerIndex;
    }

    /** Overloaded for performance */
    Scalar sum() const;

  public:

    /** \deprecated use setZero() and reserve() */
    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      m_data.reserve(reserveSize);
    }

    /** \deprecated use insert() */
    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }

    /** \deprecated use insertBack() */
    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      if (m_outerIndex[outer+1]==0)
      {
        // we start a new inner vector: back-fill the preceding empty ones
        Index i = outer;
        while (i>=0 && m_outerIndex[i]==0)
        {
          m_outerIndex[i] = m_data.size();
          --i;
        }
        m_outerIndex[outer+1] = m_outerIndex[outer];
      }
      else
      {
        eigen_assert(m_data.index(m_data.size()-1)<inner && "wrong sorted insertion");
      }

      eigen_assert(size_t(m_outerIndex[outer+1]) == m_data.size());
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];

      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() { finalize(); }

#   ifdef EIGEN_SPARSEMATRIX_PLUGIN
#     include EIGEN_SPARSEMATRIX_PLUGIN
#   endif

  private:
    // default predicate for prune(): keeps the coefficients that are not
    // much smaller than the given reference value
    struct default_prunning_func {
      default_prunning_func(Scalar ref, RealScalar eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
};

// Iterator over the stored coefficients of one inner vector of a
// SparseMatrix (one column if column major, one row otherwise).
template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat._valuePtr()), m_indices(mat._innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer]), m_end(mat.m_outerIndex[outer+1])
    {}

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline Index index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    const Index m_end;
};
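
// Sketch of traversing all stored coefficients with the iterator above
// (m is a hypothetical column-major SparseMatrix<double>):
//
//   double sum = 0;
//   for (int j=0; j<m.outerSize(); ++j)
//     for (SparseMatrix<double>::InnerIterator it(m,j); it; ++it)
//       sum += it.value();   // it.row(), it.col(), it.index() are also valid here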

#endif // EIGEN_SPARSEMATRIX_H