10 #ifndef EIGEN_SPARSEVECTOR_H
11 #define EIGEN_SPARSEVECTOR_H
// Compile-time traits for SparseVector: scalar/index types, storage kind, and
// the size/access flags consumed by Eigen's expression framework.
// NOTE(review): extraction artifact — the enclosing `namespace internal {`,
// the struct braces, the `IsColVector` definition, and the `enum {` opener are
// missing from this view; code lines below are kept byte-identical.
29 template<
typename _Scalar,
int _Options,
typename _StorageIndex>
30 struct traits<SparseVector<_Scalar, _Options, _StorageIndex> >
32 typedef _Scalar Scalar;
33 typedef _StorageIndex StorageIndex;
34 typedef Sparse StorageKind;
35 typedef MatrixXpr XprKind;
// A column vector has a dynamic number of rows and exactly one column;
// a row vector is the transpose of that.
39 RowsAtCompileTime = IsColVector ? Dynamic : 1,
40 ColsAtCompileTime = IsColVector ? 1 : Dynamic,
41 MaxRowsAtCompileTime = RowsAtCompileTime,
42 MaxColsAtCompileTime = ColsAtCompileTime,
44 CoeffReadCost = NumTraits<Scalar>::ReadCost,
// Coefficients along the (single) inner dimension can be accessed randomly.
45 SupportedAccessPatterns = InnerRandomAccessPattern
// Forward declaration of the assignment dispatcher used by
// SparseVector::operator=: the default AssignmentKind picks a strategy from
// Src's compile-time shape (runtime switch when Src is not known to be a
// vector, outer-wise iteration when the inner size is 1).
// NOTE(review): the extraction dropped the final branch of this conditional
// (the SVA_Inner alternative, original line 59).
56 template<
typename Dest,
typename Src,
57 int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
58 : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
60 struct sparse_vector_assign_selector;
// Declaration head of the SparseVector class: a sparse 1-D array storing only
// non-zero coefficients in a CompressedStorage (parallel value/index arrays).
// NOTE(review): the `class SparseVector ...` line itself (original line 65)
// and the opening brace were lost by the extraction; lines kept byte-identical.
64 template<
typename _Scalar,
int _Options,
typename _StorageIndex>
66 :
public SparseMatrixBase<SparseVector<_Scalar, _Options, _StorageIndex> >
68 typedef SparseMatrixBase<SparseVector> SparseBase;
// Standard sparse-object boilerplate: public typedefs plus inherited
// compound-assignment operators.
71 EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
72 EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
73 EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)
// Backing store: parallel arrays of values and inner indices.
75 typedef
internal::CompressedStorage<Scalar,StorageIndex> Storage;
76 enum { IsColVector = internal::traits<SparseVector>::IsColVector };
82 EIGEN_STRONG_INLINE
Index rows()
const {
return IsColVector ? m_size : 1; }
83 EIGEN_STRONG_INLINE
Index cols()
const {
return IsColVector ? 1 : m_size; }
// Inner dimension: the vector's length regardless of orientation.
84 EIGEN_STRONG_INLINE
Index innerSize()
const {
return m_size; }
// Outer dimension: a vector always has exactly one outer slot.
85 EIGEN_STRONG_INLINE
Index outerSize()
const {
return 1; }
// Raw pointer to the first stored coefficient value (const and mutable
// overloads). NOTE(review): dereferences element 0 of the storage — presumably
// callers must not use these on an empty vector; confirm against
// CompressedStorage's contract.
87 EIGEN_STRONG_INLINE
const Scalar* valuePtr()
const {
return &m_data.value(0); }
88 EIGEN_STRONG_INLINE Scalar* valuePtr() {
return &m_data.value(0); }
// Raw pointer to the first stored inner index (const and mutable overloads).
90 EIGEN_STRONG_INLINE
const StorageIndex* innerIndexPtr()
const {
return &m_data.index(0); }
91 EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() {
return &m_data.index(0); }
// Direct access to the underlying CompressedStorage (mutable overload).
94 inline Storage& data() {
return m_data; }
// Direct access to the underlying CompressedStorage (const overload).
96 inline const Storage& data()
const {
return m_data; }
// coeff(row, col): 2-D read access. The dimension orthogonal to the vector
// must be 0; the call forwards to the 1-D overload.
// NOTE(review): the signature line of this overload (original ~line 99) was
// lost by the extraction.
100 eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
101 return coeff(IsColVector ? row : col);
// coeff(i): 1-D read access; returns the stored value at inner index i, or
// whatever CompressedStorage::at yields for an absent entry (presumably zero —
// confirm against CompressedStorage).
103 inline Scalar coeff(
Index i)
const
105 eigen_assert(i>=0 && i<m_size);
106 return m_data.at(StorageIndex(i));
// coeffRef(row, col): 2-D write access, forwarding to the 1-D overload
// (signature line lost in extraction).
111 eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
112 return coeffRef(IsColVector ? row : col);
// coeffRef(i): 1-D write access. atWithInsertion creates the entry if it does
// not exist yet, so this can grow the storage (signature line lost in
// extraction; per the doxygen anchor it is Scalar& coeffRef(Index i)).
123 eigen_assert(i>=0 && i<m_size);
124 return m_data.atWithInsertion(StorageIndex(i));
// Nested iterator type declared here, defined after the class.
130 class ReverseInnerIterator;
// Drop all stored entries; the vector becomes all-zero, size unchanged.
132 inline void setZero() { m_data.clear(); }
// Part of the low-level batch-insertion API shared with SparseMatrix.
// A vector has a single outer slot, so `outer` must be 0; the parameter only
// exists for interface compatibility (unused in release builds).
137 inline void startVec(Index outer)
139 EIGEN_UNUSED_VARIABLE(outer);
140 eigen_assert(outer==0);
// insertBackByOuterInner: batch-insertion entry point shared with
// SparseMatrix; `outer` must be 0 for a vector and is only used in asserts.
143 inline Scalar& insertBackByOuterInner(
Index outer,
Index inner)
145 EIGEN_UNUSED_VARIABLE(outer);
146 eigen_assert(outer==0);
147 return insertBack(inner);
// insertBack(i): append an entry at inner index i (callers must append in
// increasing index order) and return a reference to its value.
// NOTE(review): the statement that actually appends to m_data (original
// ~line 151) was lost by the extraction; only the trailing return survives.
149 inline Scalar& insertBack(
Index i)
152 return m_data.value(m_data.size()-1);
// Unordered variant: same contract but without the sorted-append requirement;
// forwards to insertBackUnordered after checking outer==0.
155 Scalar& insertBackByOuterInnerUnordered(
Index outer,
Index inner)
157 EIGEN_UNUSED_VARIABLE(outer);
158 eigen_assert(outer==0);
159 return insertBackUnordered(inner);
// insertBackUnordered(i): append without ordering guarantee.
// NOTE(review): the appending statement (original ~line 163) was also lost in
// extraction; only the return of the last stored value remains.
161 inline Scalar& insertBackUnordered(
Index i)
164 return m_data.value(m_data.size()-1);
// insert(row, col): 2-D random insertion. Maps (row, col) onto the single
// inner coordinate, asserts the outer coordinate is 0, then forwards to the
// 1-D insert. (Signature line lost in extraction.)
169 eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
171 Index inner = IsColVector ? row :
col;
172 Index outer = IsColVector ? col :
row;
173 EIGEN_ONLY_USED_FOR_DEBUG(outer);
174 eigen_assert(outer==0);
175 return insert(inner);
// insert(i): random insertion keeping indices sorted. Grows the storage by
// one, then shifts existing entries with index > i one slot to the right, and
// finally writes index i with a zero value, returning a reference so the
// caller can assign it.
// NOTE(review): the setup lines declaring `startId` and `p` (original lines
// 180-183) were lost by the extraction.
177 Scalar& insert(
Index i)
179 eigen_assert(i>=0 && i<m_size);
// Make room for one more entry (resize keeps existing data).
184 m_data.resize(p+2,1);
// Shift-right loop: move every entry whose index exceeds i up one slot.
186 while ( (p >= startId) && (m_data.index(p) > i) )
188 m_data.index(p+1) = m_data.index(p);
189 m_data.value(p+1) = m_data.value(p);
// Write the new entry at the gap; value starts at 0 and the returned
// reference lets the caller set it.
192 m_data.index(p+1) = convert_index(i);
193 m_data.value(p+1) = 0;
194 return m_data.value(p+1);
// Pre-allocate room for reserveSize non-zeros (no size/content change).
199 inline void reserve(
Index reserveSize) { m_data.reserve(reserveSize); }
// No-op: kept for API compatibility with SparseMatrix's batch-insertion
// protocol, which requires a finalize() call.
202 inline void finalize() {}
// Remove stored entries that are approximately equal to `reference` within
// `epsilon`; delegates to CompressedStorage::prune.
204 void prune(
const Scalar& reference,
const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
206 m_data.prune(reference,epsilon);
// resize(rows, cols): 2-D resize; the dimension orthogonal to the vector must
// be 1. (Signature line lost in extraction.)
211 eigen_assert((IsColVector ? cols : rows)==1 &&
"Outer dimension must equal 1");
212 resize(IsColVector ? rows : cols);
// resize(newSize): 1-D resize.
// NOTE(review): the body (original lines 216-219, presumably updating m_size
// and clearing the storage) was lost by the extraction — confirm against the
// upstream header.
215 void resize(
Index newSize)
// Resize only the non-zero storage, leaving the logical size untouched.
221 void resizeNonZeros(
Index size) { m_data.resize(size); }
// Default constructor: empty vector of size 0.
223 inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }
// Size constructor: all-zero vector of the given length (explicit to avoid
// implicit Index -> SparseVector conversions).
225 explicit inline SparseVector(
Index size) : m_size(0) { check_template_parameters(); resize(size); }
// 2-D constructor: one of rows/cols must be 1 (checked in resize(rows,cols)).
227 inline SparseVector(
Index rows,
Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }
// Conversion constructor from any sparse expression; delegates the actual
// work to operator=. (The member-init list line was lost in extraction.)
229 template<
typename OtherDerived>
230 inline SparseVector(
const SparseMatrixBase<OtherDerived>& other)
233 check_template_parameters();
234 *
this = other.derived();
// Copy constructor: initializes the base, then reuses operator= so the copy
// logic lives in a single place.
237 inline SparseVector(
const SparseVector& other)
238 : SparseBase(other), m_size(0)
240 check_template_parameters();
241 *
this = other.derived();
// swap(other): O(1) exchange of size and storage with another SparseVector.
// (The function signature, original line 248, was lost in extraction.)
250 std::swap(m_size, other.m_size);
251 m_data.swap(other.m_data);
// operator=(const SparseVector&): steal the storage via swap when the source
// is a disposable rvalue expression, otherwise perform a deep copy.
// (Signature and closing lines lost in extraction.)
256 if (other.isRValue())
258 swap(other.const_cast_derived());
262 resize(other.size());
263 m_data = other.m_data;
// Generic assignment from any sparse expression: evaluate into a temporary
// through the compile-time-selected strategy, then (presumably) swap it into
// *this — the trailing swap/return lines were lost in extraction.
268 template<
typename OtherDerived>
269 inline SparseVector& operator=(
const SparseMatrixBase<OtherDerived>& other)
271 SparseVector tmp(other.size());
272 internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
// Legacy overload for sparse*sparse product expressions; hidden from doxygen
// and simply forwarded to the base-class assignment.
277 #ifndef EIGEN_PARSED_BY_DOXYGEN
278 template<
typename Lhs,
typename Rhs>
279 inline SparseVector& operator=(
const SparseSparseProduct<Lhs,Rhs>& product)
281 return Base::operator=(product);
// Stream output: prints each stored entry as "(value,index) ".
// NOTE(review): the `return s;` line (original ~line 290) was lost in
// extraction.
285 friend std::ostream & operator << (std::ostream & s,
const SparseVector& m)
287 for (
Index i=0; i<m.nonZeros(); ++i)
288 s <<
"(" << m.m_data.value(i) <<
"," << m.m_data.index(i) <<
") ";
// ---- Deprecated Eigen-2 fill API (kept only for backward compatibility) ----
// startFill: begin a fill session, pre-allocating `reserve` entries.
302 EIGEN_DEPRECATED
void startFill(Index reserve)
305 m_data.reserve(reserve);
// fill(r, c): 2-D variant; the off-vector coordinate must be 0.
309 EIGEN_DEPRECATED Scalar& fill(
Index r,
Index c)
311 eigen_assert(r==0 || c==0);
312 return fill(IsColVector ? r : c);
// fill(i): append an entry (indices must arrive sorted) and return its value.
// NOTE(review): the appending statement (original ~line 318) was lost in
// extraction; only the trailing return remains.
316 EIGEN_DEPRECATED Scalar& fill(
Index i)
319 return m_data.value(m_data.size()-1);
// fillrand(r, c): 2-D variant of random-order fill.
323 EIGEN_DEPRECATED Scalar& fillrand(
Index r,
Index c)
325 eigen_assert(r==0 || c==0);
326 return fillrand(IsColVector ? r : c);
// fillrand(i): random-order fill; body (original lines 332-334) lost in
// extraction — presumably forwards to insert(i).
330 EIGEN_DEPRECATED Scalar& fillrand(
Index i)
// endFill: no-op terminator of a fill session.
336 EIGEN_DEPRECATED
void endFill() {}
// _data(): deprecated aliases for data() (mutable and const).
340 EIGEN_DEPRECATED Storage& _data() {
return m_data; }
342 EIGEN_DEPRECATED
const Storage& _data()
const {
return m_data; }
// Optional user-provided extension point: injects extra members into the
// class when EIGEN_SPARSEVECTOR_PLUGIN is defined.
344 # ifdef EIGEN_SPARSEVECTOR_PLUGIN
345 # include EIGEN_SPARSEVECTOR_PLUGIN
// Compile-time validation of the template parameters: the index type must be
// signed, and _Options may contain only the ColMajor/RowMajor bits.
350 static void check_template_parameters()
352 EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
353 EIGEN_STATIC_ASSERT((_Options&(
ColMajor|
RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
// Forward iterator over the stored (non-zero) entries of a SparseVector.
// NOTE(review): the class-body opening, access specifiers, the m_id/m_end
// member declarations, and closing braces were lost by the extraction.
360 template<
typename Scalar,
int _Options,
typename _StorageIndex>
361 class SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
// Construct from a vector; `outer` exists only for interface parity with
// matrix iterators and must be 0.
364 explicit InnerIterator(
const SparseVector& vec,
Index outer=0)
365 : m_data(vec.m_data), m_id(0), m_end(m_data.size())
367 EIGEN_UNUSED_VARIABLE(outer);
368 eigen_assert(outer==0);
// Construct directly from a CompressedStorage.
371 explicit InnerIterator(
const internal::CompressedStorage<Scalar,StorageIndex>& data)
372 : m_data(data), m_id(0), m_end(m_data.size())
// Advance to the next stored entry.
375 inline InnerIterator& operator++() { m_id++;
return *
this; }
// Value of the current entry (read-only copy).
377 inline Scalar value()
const {
return m_data.value(m_id); }
// Mutable reference to the current value; const_cast is needed because the
// iterator holds a const reference to the storage.
378 inline Scalar& valueRef() {
return const_cast<Scalar&
>(m_data.value(m_id)); }
// Inner index of the current entry.
380 inline StorageIndex index()
const {
return m_data.index(m_id); }
// row()/col(): map the inner index back to 2-D coordinates depending on the
// vector's orientation.
381 inline Index row()
const {
return IsColVector ? index() : 0; }
382 inline Index col()
const {
return IsColVector ? 0 : index(); }
// True while the iterator has not reached the end.
384 inline operator bool()
const {
return (m_id < m_end); }
387 const internal::CompressedStorage<Scalar,StorageIndex>& m_data;
// Private catch-all constructor: poisons accidental construction from a
// generic sparse expression (compile error instead of silent misuse).
394 template<
typename T> InnerIterator(
const SparseMatrixBase<T>&,
Index outer=0);
// Reverse iterator over the stored entries: starts past the last entry and
// walks backwards with operator--. Note the off-by-one convention: m_id is
// one past the current entry, hence the `m_id-1` accesses below.
// NOTE(review): class-body opening, the m_id/m_start member declarations, and
// closing braces were lost by the extraction.
397 template<
typename Scalar,
int _Options,
typename _StorageIndex>
398 class SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
// Construct from a vector; `outer` must be 0 (interface parity only).
401 explicit ReverseInnerIterator(
const SparseVector& vec,
Index outer=0)
402 : m_data(vec.m_data), m_id(m_data.size()), m_start(0)
404 EIGEN_UNUSED_VARIABLE(outer);
405 eigen_assert(outer==0);
// Construct directly from a CompressedStorage.
408 explicit ReverseInnerIterator(
const internal::CompressedStorage<Scalar,StorageIndex>& data)
409 : m_data(data), m_id(m_data.size()), m_start(0)
// Step backwards to the previous stored entry.
412 inline ReverseInnerIterator& operator--() { m_id--;
return *
this; }
// Current value (m_id-1 because m_id is one past the current entry).
414 inline Scalar value()
const {
return m_data.value(m_id-1); }
// Mutable reference to the current value (const_cast as in InnerIterator).
415 inline Scalar& valueRef() {
return const_cast<Scalar&
>(m_data.value(m_id-1)); }
// Inner index of the current entry.
417 inline StorageIndex index()
const {
return m_data.index(m_id-1); }
// 2-D coordinates of the current entry, depending on orientation.
418 inline Index row()
const {
return IsColVector ? index() : 0; }
419 inline Index col()
const {
return IsColVector ? 0 : index(); }
// True while entries remain before m_start.
421 inline operator bool()
const {
return (m_id > m_start); }
424 const internal::CompressedStorage<Scalar,StorageIndex>& m_data;
// Evaluator specialization plugging SparseVector into Eigen's expression
// evaluation machinery; it simply wraps a reference to the vector.
// NOTE(review): struct braces and the `enum {` opener were lost in extraction.
431 template<
typename _Scalar,
int _Options,
typename _Index>
432 struct evaluator<SparseVector<_Scalar,_Options,_Index> >
433 : evaluator_base<SparseVector<_Scalar,_Options,_Index> >
435 typedef SparseVector<_Scalar,_Options,_Index> SparseVectorType;
436 typedef typename SparseVectorType::InnerIterator InnerIterator;
437 typedef typename SparseVectorType::ReverseInnerIterator ReverseInnerIterator;
440 CoeffReadCost = NumTraits<_Scalar>::ReadCost,
441 Flags = SparseVectorType::Flags
// Store only a reference; evaluation of a plain SparseVector is free.
444 explicit evaluator(
const SparseVectorType &mat) : m_matrix(mat) {}
// Exact non-zero count of the wrapped vector (used for allocation hints).
446 inline Index nonZerosEstimate()
const {
447 return m_matrix.nonZeros();
// Implicit conversions back to the wrapped vector (mutable and const).
450 operator SparseVectorType&() {
return m_matrix.const_cast_derived(); }
451 operator const SparseVectorType&()
const {
return m_matrix; }
453 const SparseVectorType &m_matrix;
// Inner-wise assignment strategy: the source is a single inner vector, so one
// pass over its InnerIterator inserts every non-zero into dst.
456 template<
typename Dest,
typename Src>
457 struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
458 static void run(Dest& dst,
const Src& src) {
// Sanity check: this strategy only applies when the whole vector is one
// inner dimension.
459 eigen_internal_assert(src.innerSize()==src.size());
460 typedef internal::evaluator<Src> SrcEvaluatorType;
461 SrcEvaluatorType srcEval(src);
462 for(
typename SrcEvaluatorType::InnerIterator it(srcEval, 0); it; ++it)
463 dst.insert(it.index()) = it.value();
// Outer-wise assignment strategy: the source has inner size 1, so each outer
// index i holds at most one coefficient; copy it into dst.insert(i).
467 template<
typename Dest,
typename Src>
468 struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
469 static void run(Dest& dst,
const Src& src) {
470 eigen_internal_assert(src.outerSize()==src.size());
471 typedef internal::evaluator<Src> SrcEvaluatorType;
472 SrcEvaluatorType srcEval(src);
473 for(Index i=0; i<src.size(); ++i)
475 typename SrcEvaluatorType::InnerIterator it(srcEval, i);
// NOTE(review): the guard checking the iterator is valid before
// dereferencing (original ~line 476, presumably `if(it)`) was lost in
// extraction.
477 dst.insert(i) = it.value();
// Runtime dispatch: when the source's shape is not known at compile time,
// inspect outerSize() at run time and delegate to the inner- or outer-wise
// strategy accordingly.
482 template<
typename Dest,
typename Src>
483 struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
484 static void run(Dest& dst,
const Src& src) {
485 if(src.outerSize()==1) sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
486 else sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
494 #endif // EIGEN_SPARSEVECTOR_H
Index size() const
Definition: SparseMatrixBase.h:157
Definition: Constants.h:314
RowXpr row(Index i)
Definition: SparseMatrixBase.h:797
const unsigned int LvalueBit
Definition: Constants.h:130
Scalar & coeffRef(Index i)
Definition: SparseVector.h:121
Index nonZeros() const
Definition: SparseVector.h:135
Eigen::Index Index
The interface type of indices.
Definition: EigenBase.h:37
const unsigned int RowMajorBit
Definition: Constants.h:53
~SparseVector()
Definition: SparseVector.h:294
void swap(SparseVector &other)
Definition: SparseVector.h:248
Scalar sum() const
Definition: SparseRedux.h:38
a sparse vector class
Definition: SparseUtil.h:70
Definition: Eigen_Colamd.h:54
ColXpr col(Index i)
Definition: SparseMatrixBase.h:778
Definition: Constants.h:312