#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

namespace Eigen {

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index StorageIndex;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{
  typedef SparseMatrix<_Scalar, _Options, _Index> MatrixType;
  typedef typename ref_selector<MatrixType>::type MatrixTypeNested;
  typedef typename remove_reference<MatrixTypeNested>::type _MatrixTypeNested;

  typedef _Scalar Scalar;
  typedef Dense StorageKind;
  typedef _Index StorageIndex;
  typedef MatrixXpr XprKind;

  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = 1,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = 1
  };
};
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<const SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
 : public traits<Diagonal<SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{};

} // end namespace internal
template<typename _Scalar, int _Options, typename _Index>
class SparseMatrix
  : public SparseCompressedBase<SparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    typedef SparseCompressedBase<SparseMatrix> Base;
    using Base::isCompressed;
    using Base::nonZeros;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)
    using Base::operator+=;
    using Base::operator-=;

    typedef Diagonal<SparseMatrix> DiagonalReturnType;
    typedef Diagonal<const SparseMatrix> ConstDiagonalReturnType;
    typedef typename Base::InnerIterator InnerIterator;
    typedef typename Base::ReverseInnerIterator ReverseInnerIterator;

    using Base::IsRowMajor;
    typedef internal::CompressedStorage<Scalar,StorageIndex> Storage;
    enum { Options = _Options };
    typedef typename Base::IndexVector IndexVector;
    typedef typename Base::ScalarVector ScalarVector;

  protected:
    Index m_outerSize;
    Index m_innerSize;
    StorageIndex* m_outerIndex;
    StorageIndex* m_innerNonZeros;  // optional, null means the matrix is in compressed mode
    Storage m_data;

  public:
    /** \returns the number of rows of the matrix */
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    /** \returns the number of columns of the matrix */
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }

    /** \returns the number of rows (resp. columns) for a column-major (resp. row-major) matrix */
    inline Index innerSize() const { return m_innerSize; }
    /** \returns the number of columns (resp. rows) for a column-major (resp. row-major) matrix */
    inline Index outerSize() const { return m_outerSize; }
    /** \returns a const pointer to the array of values */
    inline const Scalar* valuePtr() const { return &m_data.value(0); }
    /** \returns a non-const pointer to the array of values */
    inline Scalar* valuePtr() { return &m_data.value(0); }

    /** \returns a const pointer to the array of inner indices */
    inline const StorageIndex* innerIndexPtr() const { return &m_data.index(0); }
    /** \returns a non-const pointer to the array of inner indices */
    inline StorageIndex* innerIndexPtr() { return &m_data.index(0); }

    /** \returns a const pointer to the array of the starting positions of the inner vectors */
    inline const StorageIndex* outerIndexPtr() const { return m_outerIndex; }
    /** \returns a non-const pointer to the array of the starting positions of the inner vectors */
    inline StorageIndex* outerIndexPtr() { return m_outerIndex; }

    /** \returns a const pointer to the array of the number of non zeros of the inner vectors,
      * or the null pointer in compressed mode */
    inline const StorageIndex* innerNonZeroPtr() const { return m_innerNonZeros; }
    /** \returns a non-const pointer to the array of the number of non zeros of the inner vectors */
    inline StorageIndex* innerNonZeroPtr() { return m_innerNonZeros; }
    /** \internal */
    inline Storage& data() { return m_data; }
    /** \internal */
    inline const Storage& data() const { return m_data; }
    /** \returns the value of the matrix at position \a row, \a col,
      * or Scalar(0) if the coefficient is not stored */
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      return m_data.atInRange(m_outerIndex[outer], end, StorageIndex(inner));
    }
    /** \returns a non-const reference to the value of the matrix at position \a row, \a col.
      * If the element does not exist yet, it is inserted via insert(Index,Index). */
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
      if(end<=start)
        return insert(row,col);
      const Index p = m_data.searchLowerIndex(start,end-1,StorageIndex(inner));
      if((p<end) && (m_data.index(p)==inner))
        return m_data.value(p);
      else
        return insert(row,col);
    }
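    /* Usage sketch (illustrative, not part of the original header): coeff() is a
     * read-only lookup that returns Scalar(0) for missing entries, while
     * coeffRef() returns a writable reference and inserts the coefficient on the
     * fly if needed. The matrix name and indices below are assumptions.
     *
     *   Eigen::SparseMatrix<double> A(10,10);
     *   double v = A.coeff(3,4);   // v == 0.0, nothing is inserted
     *   A.coeffRef(3,4) = 2.5;     // inserts the (3,4) entry, then writes 2.5
     *   A.coeffRef(3,4) += 1.0;    // finds the existing entry, now 3.5
     */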
    /** Removes all non zeros but keeps the allocated memory */
    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
      if(m_innerNonZeros)
        memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));
    }

    /** Preallocates \a reserveSize non zeros.
      * Precondition: the matrix must be in compressed mode. */
    inline void reserve(Index reserveSize)
    {
      eigen_assert(isCompressed() && "This function does not make sense in non compressed mode.");
      m_data.reserve(reserveSize);
    }
    #ifdef EIGEN_PARSED_BY_DOXYGEN
    /** Preallocates \a reserveSizes[\c j] non zeros for each column (resp. row) \c j. */
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes);
    #else
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::value_type& enableif =
    #if (!EIGEN_COMP_MSVC) || (EIGEN_COMP_MSVC>=1500) // MSVC 2005 fails to compile with this typename
        typename
    #endif
        SizesType::value_type())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
    #endif // EIGEN_PARSED_BY_DOXYGEN
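    /* Usage sketch (illustrative, not part of the original header): the
     * per-vector overload is the one to call before a batch of random insert()
     * calls; the estimate of 6 nonzeros per column is an assumption.
     *
     *   Eigen::SparseMatrix<double> A(rows, cols);      // column-major by default
     *   A.reserve(Eigen::VectorXi::Constant(cols, 6));  // 6 slots per column
     *   // ... A.insert(i,j) = v_ij; ...
     *   A.makeCompressed();                             // squeeze out unused slots
     */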
    /** \internal */
    template<class SizesType>
    inline void reserveInnerVectors(const SizesType& reserveSizes)
    {
      if(isCompressed())
      {
        Index totalReserveSize = 0;
        // turn the matrix into non-compressed mode
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();

        // temporarily use m_innerNonZeros to hold the new starting positions
        StorageIndex* newOuterIndex = m_innerNonZeros;

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
          totalReserveSize += reserveSizes[j];
        }
        m_data.reserve(totalReserveSize);
        StorageIndex previousOuterIndex = m_outerIndex[m_outerSize];
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          StorageIndex innerNNZ = previousOuterIndex - m_outerIndex[j];
          for(Index i=innerNNZ-1; i>=0; --i)
          {
            m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
            m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
          }
          previousOuterIndex = m_outerIndex[j];
          m_outerIndex[j] = newOuterIndex[j];
          m_innerNonZeros[j] = innerNNZ;
        }
        m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];

        m_data.resize(m_outerIndex[m_outerSize]);
      }
      else
      {
        StorageIndex* newOuterIndex = static_cast<StorageIndex*>(std::malloc((m_outerSize+1)*sizeof(StorageIndex)));
        if (!newOuterIndex) internal::throw_std_bad_alloc();

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          StorageIndex alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
          StorageIndex toReserve = std::max<StorageIndex>(reserveSizes[j], alreadyReserved);
          count += toReserve + m_innerNonZeros[j];
        }
        newOuterIndex[m_outerSize] = count;

        m_data.resize(count);
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          Index offset = newOuterIndex[j] - m_outerIndex[j];
          if(offset>0)
          {
            StorageIndex innerNNZ = m_innerNonZeros[j];
            for(Index i=innerNNZ-1; i>=0; --i)
            {
              m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
              m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
            }
          }
        }

        std::swap(m_outerIndex, newOuterIndex);
        std::free(newOuterIndex);
      }
    }
    //--- low level purely coherent filling ---

    /** \internal
      * Same as insert(Index,Index) but faster when the element is appended at
      * the end of its target inner vector. */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \internal
      * \sa insertBack */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(Index(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }

    /** \internal
      * \warning use it only if you know what you are doing */
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }
    /** \internal
      * \sa insertBack, insertBackByOuterInner */
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==Index(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }

    /** \internal
      * Must be called after inserting a set of non zeros via the low level
      * compressed API. */
    inline void finalize()
    {
      if(isCompressed())
      {
        StorageIndex size = internal::convert_index<StorageIndex>(m_data.size());
        Index i = m_outerSize;
        // find the last filled inner vector
        while (i>=0 && m_outerIndex[i]==0)
          --i;
        ++i;
        while (i<=m_outerSize)
        {
          m_outerIndex[i] = size;
          ++i;
        }
      }
    }
    //---

    template<typename InputIterators>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end);

    void sumupDuplicates();

    //---

    /** \internal
      * Same as insert(Index,Index) except that the indices are given relative
      * to the storage order. */
    Scalar& insertByOuterInner(Index j, Index i)
    {
      return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
    }
    /** Turns the matrix into the \em compressed format. */
    void makeCompressed()
    {
      if(isCompressed())
        return;

      eigen_internal_assert(m_outerIndex!=0 && m_outerSize>0);

      Index oldStart = m_outerIndex[1];
      m_outerIndex[1] = m_innerNonZeros[0];
      for(Index j=1; j<m_outerSize; ++j)
      {
        Index nextOldStart = m_outerIndex[j+1];
        Index offset = oldStart - m_outerIndex[j];
        if(offset>0)
        {
          for(Index k=0; k<m_innerNonZeros[j]; ++k)
          {
            m_data.index(m_outerIndex[j]+k) = m_data.index(oldStart+k);
            m_data.value(m_outerIndex[j]+k) = m_data.value(oldStart+k);
          }
        }
        m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
        oldStart = nextOldStart;
      }
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
      m_data.resize(m_outerIndex[m_outerSize]);
      m_data.squeeze();
    }
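    /* Usage sketch (illustrative, not part of the original header): random
     * insert() calls leave the matrix in uncompressed mode with per-vector
     * free space; makeCompressed() squeezes the holes out, which the solver
     * modules generally expect before factorization.
     *
     *   A.insert(2,1) = 3.0;   // may switch A to uncompressed mode
     *   A.insert(0,7) = 1.5;
     *   A.makeCompressed();    // back to the plain CCS/CRS layout
     */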
    /** Turns the matrix into the uncompressed mode */
    void uncompress()
    {
      if(m_innerNonZeros != 0)
        return;
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      for (Index i = 0; i < m_outerSize; i++)
      {
        m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }
    }
    /** Suppresses all nonzeros which are \em much \em smaller \em than \a reference under the tolerance \a epsilon */
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      prune(default_prunning_func(reference,epsilon));
    }
    /** Turns the matrix into compressed format, and suppresses all nonzeros which do not satisfy the predicate \a keep.
      * The functor type \a KeepFunc must implement:
      *   bool operator() (const Index& row, const Index& col, const Scalar& value) const; */
    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      // TODO optimize the uncompressed mode to avoid moving and allocating the data twice
      makeCompressed();

      StorageIndex k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k,0);
    }
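    /* Usage sketch (illustrative, not part of the original header): the functor
     * overload receives (row, col, value) and keeps the entry when it returns
     * true; the thresholds below are assumptions, and any functor type works,
     * including a C++11 lambda for Scalar = double.
     *
     *   A.prune(1.0, 1e-12);   // drop entries much smaller than 1.0
     *   A.prune([](const Eigen::Index&, const Eigen::Index&, const double& v)
     *           { return std::abs(v) > 1e-9; });
     */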
    /** Resizes the matrix to a \a rows x \a cols matrix leaving old values untouched. */
    void conservativeResize(Index rows, Index cols)
    {
      // no change
      if (this->rows() == rows && this->cols() == cols) return;

      // if one dimension is null, there is nothing to preserve
      if(rows==0 || cols==0) return resize(rows,cols);

      Index innerChange = IsRowMajor ? cols - this->cols() : rows - this->rows();
      Index outerChange = IsRowMajor ? rows - this->rows() : cols - this->cols();
      StorageIndex newInnerSize = convert_index(IsRowMajor ? cols : rows);

      // deal with the inner non zeros
      if (m_innerNonZeros)
      {
        // resize m_innerNonZeros
        StorageIndex *newInnerNonZeros = static_cast<StorageIndex*>(std::realloc(m_innerNonZeros, (m_outerSize + outerChange) * sizeof(StorageIndex)));
        if (!newInnerNonZeros) internal::throw_std_bad_alloc();
        m_innerNonZeros = newInnerNonZeros;

        for(Index i=m_outerSize; i<m_outerSize+outerChange; i++)
          m_innerNonZeros[i] = 0;
      }
      else if (innerChange < 0)
      {
        // the inner size decreased: allocate a new m_innerNonZeros
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc((m_outerSize+outerChange+1) * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();
        for(Index i = 0; i < m_outerSize; i++)
          m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }

      // drop the entries that fall outside the new inner size
      if (m_innerNonZeros && innerChange < 0)
      {
        for(Index i = 0; i < m_outerSize + (std::min)(outerChange, Index(0)); i++)
        {
          StorageIndex &n = m_innerNonZeros[i];
          StorageIndex start = m_outerIndex[i];
          while (n > 0 && m_data.index(start+n-1) >= newInnerSize) --n;
        }
      }

      m_innerSize = newInnerSize;

      // re-allocate the outer index structure if needed
      if (outerChange == 0)
        return;

      StorageIndex *newOuterIndex = static_cast<StorageIndex*>(std::realloc(m_outerIndex, (m_outerSize + outerChange + 1) * sizeof(StorageIndex)));
      if (!newOuterIndex) internal::throw_std_bad_alloc();
      m_outerIndex = newOuterIndex;
      if (outerChange > 0)
      {
        StorageIndex last = m_outerSize == 0 ? 0 : m_outerIndex[m_outerSize];
        for(Index i=m_outerSize; i<m_outerSize+outerChange+1; i++)
          m_outerIndex[i] = last;
      }
      m_outerSize += outerChange;
    }
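    /* Usage sketch (illustrative, not part of the original header):
     * conservativeResize() keeps existing nonzeros (truncating those that fall
     * outside the new bounds), whereas resize() discards everything.
     *
     *   Eigen::SparseMatrix<double> A(4,4);
     *   A.insert(1,1) = 2.0;
     *   A.conservativeResize(6,6);   // the (1,1) entry is preserved
     *   A.resize(3,3);               // A is now empty
     */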
    /** Resizes the matrix to a \a rows x \a cols matrix and initializes it to zero. */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        std::free(m_outerIndex);
        m_outerIndex = static_cast<StorageIndex*>(std::malloc((outerSize + 1) * sizeof(StorageIndex)));
        if (!m_outerIndex) internal::throw_std_bad_alloc();

        m_outerSize = outerSize;
      }
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
    }
    /** \internal */
    void resizeNonZeros(Index size)
    {
      m_data.resize(size);
    }
    /** \returns a const expression of the diagonal coefficients */
    const ConstDiagonalReturnType diagonal() const { return ConstDiagonalReturnType(*this); }

    /** \returns a read-write expression of the diagonal coefficients */
    DiagonalReturnType diagonal() { return DiagonalReturnType(*this); }
    /** Default constructor yielding an empty \c 0 \c x \c 0 matrix */
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(0, 0);
    }

    /** Constructs a \a rows \c x \a cols empty matrix */
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(rows, cols);
    }
    /** Constructs a sparse matrix from the sparse expression \a other */
    template<typename OtherDerived>
    inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
        YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
      check_template_parameters();
      const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
      if (needToTranspose) *this = other.derived();
      else                 internal::call_assignment_no_alias(*this, other.derived());
    }
    /** Constructs a sparse matrix from the sparse selfadjoint view \a other */
    template<typename OtherDerived, unsigned int UpLo>
    inline SparseMatrix(const SparseSelfAdjointView<OtherDerived, UpLo>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      Base::operator=(other);
    }
    /** Copy constructor (it performs a deep copy) */
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** \brief Copy constructor with in-place evaluation */
    template<typename OtherDerived>
    SparseMatrix(const ReturnByValue<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      initAssignment(other);
      other.evalTo(*this);
    }
    /** \brief Copy constructor with in-place evaluation */
    template<typename OtherDerived>
    explicit SparseMatrix(const DiagonalBase<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** Swaps the content of two sparse matrices of the same type.
      * This is a fast operation that simply swaps the underlying pointers and parameters. */
    inline void swap(SparseMatrix& other)
    {
      std::swap(m_outerIndex, other.m_outerIndex);
      std::swap(m_innerSize, other.m_innerSize);
      std::swap(m_outerSize, other.m_outerSize);
      std::swap(m_innerNonZeros, other.m_innerNonZeros);
      m_data.swap(other.m_data);
    }
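    /* Usage sketch (illustrative, not part of the original header): swap() only
     * exchanges the internal sizes, index buffers and value storage, so it runs
     * in constant time regardless of the number of nonzeros.
     *
     *   Eigen::SparseMatrix<double> A(1000,1000), B(10,10);
     *   A.swap(B);   // O(1): pointers and sizes are exchanged, no copy
     */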
    /** Sets *this to the identity matrix.
      * This function also turns the matrix into compressed mode, and drops any reserved memory. */
    inline void setIdentity()
    {
      eigen_assert(rows() == cols() && "ONLY FOR SQUARED MATRICES");
      this->m_data.resize(rows());
      Eigen::Map<IndexVector>(this->m_data.indexPtr(), rows()).setLinSpaced(0, StorageIndex(rows()-1));
      Eigen::Map<ScalarVector>(this->m_data.valuePtr(), rows()).setOnes();
      Eigen::Map<IndexVector>(this->m_outerIndex, rows()+1).setLinSpaced(0, StorageIndex(rows()));
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
    }
    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else if(this!=&other)
      {
        #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
          EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        #endif
        initAssignment(other);
        if(other.isCompressed())
        {
          internal::smart_copy(other.m_outerIndex, other.m_outerIndex + m_outerSize + 1, m_outerIndex);
          m_data = other.m_data;
        }
        else
        {
          Base::operator=(other);
        }
      }
      return *this;
    }
    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename OtherDerived>
    inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
    { return Base::operator=(other.derived()); }
    #endif // EIGEN_PARSED_BY_DOXYGEN

    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other);
    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      EIGEN_DBG_SPARSE(
        s << "Nonzero entries:\n";
        if(m.isCompressed())
        {
          for (Index i=0; i<m.nonZeros(); ++i)
            s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
        }
        else
        {
          for (Index i=0; i<m.outerSize(); ++i)
          {
            Index p = m.m_outerIndex[i];
            Index pe = m.m_outerIndex[i]+m.m_innerNonZeros[i];
            Index k=p;
            for (; k<pe; ++k)
              s << "(" << m.m_data.value(k) << "," << m.m_data.index(k) << ") ";
            for (; k<m.m_outerIndex[i+1]; ++k)
              s << "(_,_) ";
          }
        }
        s << std::endl;
        s << std::endl;
        s << "Outer pointers:\n";
        for (Index i=0; i<m.outerSize(); ++i)
          s << m.m_outerIndex[i] << " ";
        s << " $" << std::endl;
        if(!m.isCompressed())
        {
          s << "Inner non zeros:\n";
          for (Index i=0; i<m.outerSize(); ++i)
            s << m.m_innerNonZeros[i] << " ";
          s << " $" << std::endl;
        }
        s << std::endl;
      );
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }
    /** Destructor */
    inline ~SparseMatrix()
    {
      std::free(m_outerIndex);
      std::free(m_innerNonZeros);
    }

    /** Overloaded for performance */
    Scalar sum() const;

#   ifdef EIGEN_SPARSEMATRIX_PLUGIN
#     include EIGEN_SPARSEMATRIX_PLUGIN
#   endif
  protected:
    /** \internal */
    template<typename Other>
    void initAssignment(const Other& other)
    {
      resize(other.rows(), other.cols());
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
    }

    /** \internal
      * \sa insert(Index,Index) */
    EIGEN_DONT_INLINE Scalar& insertCompressed(Index row, Index col);

    /** \internal
      * \sa insert(Index,Index) */
    EIGEN_DONT_INLINE Scalar& insertUncompressed(Index row, Index col);
    /** \internal
      * A vector object that is equal to 0 everywhere but \a v at the position \a i */
    class SingletonVector
    {
        StorageIndex m_index;
        StorageIndex m_value;
      public:
        typedef StorageIndex value_type;
        SingletonVector(Index i, Index v)
          : m_index(convert_index(i)), m_value(convert_index(v))
        {}

        StorageIndex operator[](Index i) const { return i==m_index ? m_value : 0; }
    };
    /** \internal
      * \sa insert(Index,Index) */
    EIGEN_STRONG_INLINE Scalar& insertBackUncompressed(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      eigen_assert(!isCompressed());
      eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));

      Index p = m_outerIndex[outer] + m_innerNonZeros[outer]++;
      m_data.index(p) = convert_index(inner);
      return (m_data.value(p) = 0);
    }
  private:
    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }
    struct default_prunning_func {
      default_prunning_func(const Scalar& ref, const RealScalar& eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
};
namespace internal {

template<typename InputIterator, typename SparseMatrixType>
void set_from_triplets(const InputIterator& begin, const InputIterator& end, SparseMatrixType& mat, int Options = 0)
{
  EIGEN_UNUSED_VARIABLE(Options);
  enum { IsRowMajor = SparseMatrixType::IsRowMajor };
  typedef typename SparseMatrixType::Scalar Scalar;
  typedef typename SparseMatrixType::StorageIndex StorageIndex;
  SparseMatrix<Scalar,IsRowMajor?ColMajor:RowMajor,StorageIndex> trMat(mat.rows(),mat.cols());

  if(begin!=end)
  {
    // pass 1: count the number of nonzeros per inner vector
    typename SparseMatrixType::IndexVector wi(trMat.outerSize());
    wi.setZero();
    for(InputIterator it(begin); it!=end; ++it)
    {
      eigen_assert(it->row()>=0 && it->row()<mat.rows() && it->col()>=0 && it->col()<mat.cols());
      wi(IsRowMajor ? it->col() : it->row())++;
    }

    // pass 2: insert all the elements into trMat
    trMat.reserve(wi);
    for(InputIterator it(begin); it!=end; ++it)
      trMat.insertBackUncompressed(it->row(),it->col()) = it->value();

    // pass 3: sum up duplicated entries
    trMat.sumupDuplicates();
  }

  // pass 4: transposed copy -> implicit sorting
  mat = trMat;
}

} // end namespace internal
/** Fills the matrix \c *this with the list of \em triplets defined by the iterator range \a begin - \a end.
  * A \em triplet is a tuple (i,j,value); the initial content of \c *this is destroyed,
  * and duplicated entries are summed up. */
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end)
{
  internal::set_from_triplets(begin, end, *this);
}
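/* Usage sketch (illustrative, not part of the original header): the standard
 * way to fill a sparse matrix; `estimated_nnz` and the triplet values are
 * assumptions.
 *
 *   typedef Eigen::Triplet<double> T;
 *   std::vector<T> triplets;
 *   triplets.reserve(estimated_nnz);
 *   // ... triplets.push_back(T(i, j, v_ij)); ...
 *   Eigen::SparseMatrix<double> A(rows, cols);
 *   A.setFromTriplets(triplets.begin(), triplets.end());
 *   // A is now in compressed mode, with sorted and deduplicated entries
 */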
/** \internal */
template<typename Scalar, int _Options, typename _Index>
void SparseMatrix<Scalar,_Options,_Index>::sumupDuplicates()
{
  eigen_assert(!isCompressed());
  // TODO, in practice we should be able to use m_innerNonZeros for that task
  IndexVector wi(innerSize());
  wi.fill(-1);
  StorageIndex count = 0;
  // for each inner vector, wi(inner_index) holds the position of the first entry in the index/value buffers
  for(Index j=0; j<outerSize(); ++j)
  {
    StorageIndex start = count;
    Index oldEnd = m_outerIndex[j]+m_innerNonZeros[j];
    for(Index k=m_outerIndex[j]; k<oldEnd; ++k)
    {
      Index i = m_data.index(k);
      if(wi(i)>=start)
      {
        // we already met this entry => accumulate it
        m_data.value(wi(i)) += m_data.value(k);
      }
      else
      {
        m_data.value(count) = m_data.value(k);
        m_data.index(count) = m_data.index(k);
        wi(i) = count;
        ++count;
      }
    }
    m_outerIndex[j] = start;
  }
  m_outerIndex[m_outerSize] = count;
  // turn the matrix into compressed form
  std::free(m_innerNonZeros);
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}
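/* Note (illustrative, not part of the original header): this accumulation is
 * what makes repeated (i,j) pairs legal in setFromTriplets, e.g. when
 * assembling a finite-element matrix where several elements contribute to the
 * same coefficient:
 *
 *   triplets.push_back(T(0, 0, 1.0));
 *   triplets.push_back(T(0, 0, 2.0));
 *   A.setFromTriplets(triplets.begin(), triplets.end());  // A.coeff(0,0) == 3.0
 */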
template<typename Scalar, int _Options, typename _Index>
template<typename OtherDerived>
EIGEN_DONT_INLINE SparseMatrix<Scalar,_Options,_Index>& SparseMatrix<Scalar,_Options,_Index>::operator=(const SparseMatrixBase<OtherDerived>& other)
{
  EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
        YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)

  #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
    EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
  #endif

  const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
  if (needToTranspose)
  {
    // two passes algorithm:
    //  1 - compute the number of coeffs per dest inner vector
    //  2 - do the actual copy/eval
    // Since each coeff of the rhs has to be evaluated twice, let's evaluate it if needed
    typedef typename internal::nested_eval<OtherDerived,2,typename internal::plain_matrix_type<OtherDerived>::type >::type OtherCopy;
    typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
    typedef internal::evaluator<_OtherCopy> OtherCopyEval;
    OtherCopy otherCopy(other.derived());
    OtherCopyEval otherCopyEval(otherCopy);

    SparseMatrix dest(other.rows(),other.cols());
    Eigen::Map<IndexVector> (dest.m_outerIndex,dest.outerSize()).setZero();

    // pass 1: count the nnz per dest inner vector
    for (Index j=0; j<otherCopy.outerSize(); ++j)
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
        ++dest.m_outerIndex[it.index()];

    // prefix sum
    StorageIndex count = 0;
    IndexVector positions(dest.outerSize());
    for (Index j=0; j<dest.outerSize(); ++j)
    {
      Index tmp = dest.m_outerIndex[j];
      dest.m_outerIndex[j] = count;
      positions[j] = count;
      count += tmp;
    }
    dest.m_outerIndex[dest.outerSize()] = count;
    // alloc
    dest.m_data.resize(count);
    // pass 2: copy the coefficients
    for (StorageIndex j=0; j<otherCopy.outerSize(); ++j)
    {
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
      {
        Index pos = positions[it.index()]++;
        dest.m_data.index(pos) = j;
        dest.m_data.value(pos) = it.value();
      }
    }
    this->swap(dest);
    return *this;
  }
  else
  {
    if(other.isRValue())
    {
      initAssignment(other.derived());
    }
    // there is no special optimization
    return Base::operator=(other.derived());
  }
}
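/* Usage sketch (illustrative, not part of the original header): the two-pass
 * transposed path above is what runs when assigning across storage orders.
 *
 *   Eigen::SparseMatrix<double, Eigen::RowMajor> R = ...;
 *   Eigen::SparseMatrix<double> C(R.rows(), R.cols());   // column-major
 *   C = R;   // needToTranspose == true: count pass, then copy pass
 */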
template<typename _Scalar, int _Options, typename _Index>
typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insert(Index row, Index col)
{
  eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  if(isCompressed())
  {
    if(nonZeros()==0)
    {
      // reserve space if not already done
      if(m_data.allocatedSize()==0)
        m_data.reserve(2*m_innerSize);

      // turn the matrix into non-compressed mode
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      if(!m_innerNonZeros) internal::throw_std_bad_alloc();

      memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));

      // pack all inner vectors to the end of the pre-allocated space
      // and allocate the entire free space to the first inner vector
      StorageIndex end = convert_index(m_data.allocatedSize());
      for(Index j=1; j<=m_outerSize; ++j)
        m_outerIndex[j] = end;
    }
    else
    {
      // turn the matrix into non-compressed mode
      uncompress();
    }
  }

  // check whether we can do a fast "push back" insertion
  Index data_end = m_data.allocatedSize();

  // First case: we are filling a new inner vector which is packed at the end;
  // all remaining inner vectors are assumed empty and packed to the end too.
  if(m_outerIndex[outer]==data_end)
  {
    eigen_internal_assert(m_innerNonZeros[outer]==0);

    // pack previous empty inner vectors to the end of the used space
    // and allocate the entire free space to the current inner vector
    StorageIndex p = convert_index(m_data.size());
    Index j = outer;
    while(j>=0 && m_innerNonZeros[j]==0)
      m_outerIndex[j--] = p;

    // push back the new element
    ++m_innerNonZeros[outer];
    m_data.append(Scalar(0), inner);

    // check for reallocation
    if(data_end != m_data.allocatedSize())
    {
      // m_data has been reallocated -> move the remaining inner vectors back
      // to the end of the free space
      eigen_internal_assert(data_end < m_data.allocatedSize());
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }
    return m_data.value(p);
  }

  // Second case: the next inner vector is packed to the end
  // and the current inner vector end matches the used space.
  if(m_outerIndex[outer+1]==data_end && m_outerIndex[outer]+m_innerNonZeros[outer]==m_data.size())
  {
    eigen_internal_assert(outer+1==m_outerSize || m_innerNonZeros[outer+1]==0);

    // add space for the new element
    ++m_innerNonZeros[outer];
    m_data.resize(m_data.size()+1);

    // check for reallocation
    if(data_end != m_data.allocatedSize())
    {
      eigen_internal_assert(data_end < m_data.allocatedSize());
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }

    // and insert it at the right position (sorted insertion)
    Index startId = m_outerIndex[outer];
    Index p = m_outerIndex[outer]+m_innerNonZeros[outer]-1;
    while ( (p > startId) && (m_data.index(p-1) > inner) )
    {
      m_data.index(p) = m_data.index(p-1);
      m_data.value(p) = m_data.value(p-1);
      --p;
    }

    m_data.index(p) = convert_index(inner);
    return (m_data.value(p) = 0);
  }

  if(m_data.size() != m_data.allocatedSize())
  {
    // make sure the matrix is compatible to random un-compressed insertion:
    m_data.resize(m_data.allocatedSize());
    this->reserveInnerVectors(Array<StorageIndex,Dynamic,1>::Constant(m_outerSize, 2));
  }

  return insertUncompressed(row,col);
}
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertUncompressed(Index row, Index col)
{
  eigen_assert(!isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const StorageIndex inner = convert_index(IsRowMajor ? col : row);

  Index room = m_outerIndex[outer+1] - m_outerIndex[outer];
  StorageIndex innerNNZ = m_innerNonZeros[outer];
  if(innerNNZ>=room)
  {
    // this inner vector is full, we need to reallocate the whole buffer :(
    reserve(SingletonVector(outer,std::max<StorageIndex>(2,innerNNZ)));
  }

  Index startId = m_outerIndex[outer];
  Index p = startId + m_innerNonZeros[outer];
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }
  eigen_assert((p<=startId || m_data.index(p-1)!=inner) && "you cannot insert an element that already exists, you must call coeffRef to this end");

  m_innerNonZeros[outer]++;

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertCompressed(Index row, Index col)
{
  eigen_assert(isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  Index previousOuter = outer;
  if (m_outerIndex[outer+1]==0)
  {
    // we start a new inner vector
    while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
    {
      m_outerIndex[previousOuter] = convert_index(m_data.size());
      --previousOuter;
    }
    m_outerIndex[outer+1] = m_outerIndex[outer];
  }

  // here we have to handle the tricky case where the outerIndex array starts
  // with [ 0 0 0 0 0 1 ...] and we are inserting in, e.g., the 2nd inner vector
  bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                && (size_t(m_outerIndex[outer+1]) == m_data.size());

  size_t startId = m_outerIndex[outer];
  size_t p = m_outerIndex[outer+1];
  ++m_outerIndex[outer+1];

  double reallocRatio = 1;
  if (m_data.allocatedSize()<=m_data.size())
  {
    // if there is no preallocated memory, let's reserve a minimum of 32 elements
    if (m_data.size()==0)
    {
      m_data.reserve(32);
    }
    else
    {
      // we need to reallocate: use a smart resize ratio based on the current
      // filling ratio, computed in double to avoid integer overflows
      double nnzEstimate = double(m_outerIndex[outer])*double(m_outerSize)/double(outer+1);
      reallocRatio = (nnzEstimate-double(m_data.size()))/double(m_data.size());
      // bound the ratio to reduce minor reallocs and avoid over-allocation
      reallocRatio = (std::min)((std::max)(reallocRatio,1.5),8.);
    }
  }
  m_data.resize(m_data.size()+1,reallocRatio);

  if (!isLastVec)
  {
    if (previousOuter==-1)
    {
      // oops, wrong guess: let's correct the outer offsets
      for (Index k=0; k<=(outer+1); ++k)
        m_outerIndex[k] = 0;
      Index k=outer+1;
      while(m_outerIndex[k]==0)
        m_outerIndex[k++] = 1;
      while (k<=m_outerSize && m_outerIndex[k]!=0)
        m_outerIndex[k++]++;
      p = 0;
      --k;
      k = m_outerIndex[k]-1;
      while (k>0)
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
    else
    {
      // we are not inserting into the last inner vector: update the outer indices
      Index j = outer+2;
      while (j<=m_outerSize && m_outerIndex[j]!=0)
        m_outerIndex[j++]++;
      --j;
      // shift the data of the last vectors
      Index k = m_outerIndex[j]-1;
      while (k>=Index(p))
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
  }

  // sorted insertion within the target inner vector
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
namespace internal {

template<typename _Scalar, int _Options, typename _Index>
struct evaluator<SparseMatrix<_Scalar,_Options,_Index> >
 : evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > >
{
  typedef evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > > Base;
  typedef SparseMatrix<_Scalar,_Options,_Index> SparseMatrixType;
  evaluator() : Base() {}
  explicit evaluator(const SparseMatrixType &mat) : Base(mat) {}
};

} // end namespace internal

} // end namespace Eigen
#endif // EIGEN_SPARSEMATRIX_H