10 #ifndef EIGEN_SPARSE_BLOCK_H
11 #define EIGEN_SPARSE_BLOCK_H
// NOTE(review): this listing is a documentation extraction — braces, access
// specifiers and parts of some bodies are elided, and each line carries a stray
// leading number from the original file. Code lines are left untouched below.
//
// Sparse Block specialization for "inner panels": a contiguous run of whole
// outer vectors (whole columns of a column-major xpr, or whole rows of a
// row-major xpr). Read-only expression interface over a generic sparse XprType.
16 template<
typename XprType,
int BlockRows,
int BlockCols>
17 class BlockImpl<XprType,BlockRows,BlockCols,true,Sparse>
18 :
public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,true> >
20 typedef typename internal::remove_all<typename XprType::Nested>::type _MatrixTypeNested;
21 typedef Block<XprType, BlockRows, BlockCols, true> BlockType;
// Storage order of the block decides whether the outer direction is rows or cols.
23 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
// Compile-time number of outer vectors in the panel (may be Dynamic).
25 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
27 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
// Single outer-vector constructor: i is the outer index (a row if row-major,
// a column otherwise).
29 inline BlockImpl(const XprType& xpr, Index i)
30 : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
// General constructor: only the outer start and outer size are stored; the
// inner dimension spans the full matrix (that is the inner-panel invariant).
33 inline BlockImpl(
const XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
34 : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)), m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
// The inner dimension is the full matrix extent; the outer one is m_outerSize.
37 EIGEN_STRONG_INLINE Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
38 EIGEN_STRONG_INLINE Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
// Counts stored entries by iterating every outer vector in
// [m_outerStart, m_outerStart + m_outerSize) with the nested evaluator.
// (Loop body — presumably an increment of a counter — is elided here.)
40 Index nonZeros()
const
42 typedef internal::evaluator<XprType> EvaluatorType;
43 EvaluatorType matEval(m_matrix);
45 Index end = m_outerStart + m_outerSize.value();
46 for(Index j=m_outerStart; j<end; ++j)
47 for(
typename EvaluatorType::InnerIterator it(matEval, j); it; ++it)
// Random access: forwards to the nested expression with the outer offset applied
// to the outer coordinate only.
52 inline const Scalar coeff(Index row, Index col)
const
54 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
// Linear (vector-style) access — meaningful when the block is one outer vector.
57 inline const Scalar coeff(Index index)
const
59 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
62 inline const _MatrixTypeNested& nestedExpression()
const {
return m_matrix; }
// Geometry accessors used by the evaluators below.
63 Index startRow()
const {
return IsRowMajor ? m_outerStart : 0; }
64 Index startCol()
const {
return IsRowMajor ? 0 : m_outerStart; }
65 Index blockRows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
66 Index blockCols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
// Members: nested expression and outer size. The declaration of m_outerStart is
// not visible in this extraction, only its uses — TODO confirm against original.
70 typename XprType::Nested m_matrix;
72 const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
75 EIGEN_INHERIT_ASSIGNMENT_OPERATORS(BlockImpl)
// Writable inner-panel block of an actual SparseMatrix. Unlike the generic
// expression above, this one exposes the underlying compressed-storage pointers
// (shifted by the outer start) and supports assignment by splicing the new
// values directly into the matrix's storage.
// NOTE(review): extraction artifact — braces/access specifiers elided, stray
// leading numbers kept; code lines untouched.
85 template<
typename SparseMatrixType,
int BlockRows,
int BlockCols>
86 class sparse_matrix_block_impl
87 :
public SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> >
89 typedef typename internal::remove_all<typename SparseMatrixType::Nested>::type _MatrixTypeNested;
90 typedef Block<SparseMatrixType, BlockRows, BlockCols, true> BlockType;
92 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
93 typedef SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> > Base;
94 _EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
96 typedef typename Base::IndexVector IndexVector;
97 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
// Single outer-vector constructor.
100 inline sparse_matrix_block_impl(
const SparseMatrixType& xpr, Index i)
101 : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
// General constructor: keeps only the outer start/size (inner-panel invariant).
104 inline sparse_matrix_block_impl(
const SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
105 : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)), m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
// Assignment from an arbitrary sparse expression: evaluates `other` into a
// temporary with matching storage order, then splices it into the destination
// matrix's value/index arrays in place of the block's current entries.
108 template<
typename OtherDerived>
109 inline BlockType& operator=(
const SparseMatrixBase<OtherDerived>& other)
111 typedef typename internal::remove_all<typename SparseMatrixType::Nested>::type _NestedMatrixType;
// Cast away constness of the nested reference: this impl is the writable path.
112 _NestedMatrixType& matrix =
const_cast<_NestedMatrixType&
>(m_matrix);;
117 SparseMatrix<Scalar, IsRowMajor ? RowMajor : ColMajor, StorageIndex> tmp(other);
// start/end delimit the block's current entries inside the storage;
// tail_size is everything stored after the block.
120 Index nnz = tmp.nonZeros();
121 Index start = m_outerStart==0 ? 0 : matrix.outerIndexPtr()[m_outerStart];
122 Index end = m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()];
123 Index block_size = end - start;
124 Index tail_size = m_matrix.outerIndexPtr()[m_matrix.outerSize()] - end;
// Decide whether the new entries fit in the currently allocated storage
// (the `:` branch of this conditional is elided in this extraction).
126 Index free_size = m_matrix.isCompressed()
127 ?
Index(matrix.data().allocatedSize()) + block_size
// Reallocation path: build a fresh storage array and copy
// [head | tmp's entries | tail] into it, then swap it in.
133 typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);
135 internal::smart_copy(&m_matrix.data().value(0), &m_matrix.data().value(0) + start, &newdata.value(0));
136 internal::smart_copy(&m_matrix.data().index(0), &m_matrix.data().index(0) + start, &newdata.index(0));
138 internal::smart_copy(&tmp.data().value(0), &tmp.data().value(0) + nnz, &newdata.value(start));
139 internal::smart_copy(&tmp.data().index(0), &tmp.data().index(0) + nnz, &newdata.index(start));
141 internal::smart_copy(&matrix.data().value(end), &matrix.data().value(end) + tail_size, &newdata.value(start+nnz));
142 internal::smart_copy(&matrix.data().index(end), &matrix.data().index(end) + tail_size, &newdata.index(start+nnz));
144 newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);
146 matrix.data().swap(newdata);
// In-place path: shift the tail with memmove (ranges may overlap), then copy
// the temporary's entries into the freed gap.
151 matrix.data().resize(start + nnz + tail_size);
153 internal::smart_memmove(&matrix.data().value(end), &matrix.data().value(end) + tail_size, &matrix.data().value(start + nnz));
154 internal::smart_memmove(&matrix.data().index(end), &matrix.data().index(end) + tail_size, &matrix.data().index(start + nnz));
156 internal::smart_copy(&tmp.data().value(0), &tmp.data().value(0) + nnz, &matrix.data().value(start));
157 internal::smart_copy(&tmp.data().index(0), &tmp.data().index(0) + nnz, &matrix.data().index(start));
// Uncompressed destination: refresh per-outer-vector nonzero counts.
161 if(!m_matrix.isCompressed())
162 for(Index j=0; j<m_outerSize.value(); ++j)
163 matrix.innerNonZeroPtr()[m_outerStart+j] = StorageIndex(tmp.innerVector(j).nonZeros());
// Rewrite the block's outer index entries from the temporary's per-vector sizes.
166 StorageIndex p = StorageIndex(start);
167 for(Index k=0; k<m_outerSize.value(); ++k)
169 matrix.outerIndexPtr()[m_outerStart+k] = p;
170 p += tmp.innerVector(k).nonZeros();
// Shift every outer index after the block by the net change in entry count.
172 StorageIndex offset = internal::convert_index<StorageIndex>(nnz - block_size);
173 for(Index k = m_outerStart + m_outerSize.value(); k<=matrix.outerSize(); ++k)
175 matrix.outerIndexPtr()[k] += offset;
// Copy-assignment delegates to the templated operator= above.
181 inline BlockType& operator=(
const BlockType& other)
183 return operator=<BlockType>(other);
// Raw storage accessors. Values/inner indices are shared with the full matrix;
// the outer index pointer is shifted so index 0 is the block's first vector.
186 inline const Scalar* valuePtr()
const
187 {
return m_matrix.valuePtr(); }
188 inline Scalar* valuePtr()
189 {
return m_matrix.const_cast_derived().valuePtr(); }
191 inline const StorageIndex* innerIndexPtr()
const
192 {
return m_matrix.innerIndexPtr(); }
193 inline StorageIndex* innerIndexPtr()
194 {
return m_matrix.const_cast_derived().innerIndexPtr(); }
196 inline const StorageIndex* outerIndexPtr()
const
197 {
return m_matrix.outerIndexPtr() + m_outerStart; }
198 inline StorageIndex* outerIndexPtr()
199 {
return m_matrix.const_cast_derived().outerIndexPtr() + m_outerStart; }
// innerNonZeroPtr is null when compressed (no separate counts array).
201 inline const StorageIndex* innerNonZeroPtr()
const
202 {
return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr()+m_outerStart); }
203 inline StorageIndex* innerNonZeroPtr()
204 {
return isCompressed() ? 0 : (m_matrix.const_cast_derived().innerNonZeroPtr()+m_outerStart); }
206 bool isCompressed()
const {
return m_matrix.innerNonZeroPtr()==0; }
// Writable coefficient access, with the outer offset applied.
208 inline Scalar& coeffRef(Index row, Index col)
210 return m_matrix.const_cast_derived().coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
213 inline const Scalar coeff(Index row, Index col)
const
215 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
218 inline const Scalar coeff(Index index)
const
220 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
// Last stored coefficient of a single-outer-vector (vector-only) block;
// requires at least one stored entry.
223 const Scalar& lastCoeff()
const
225 EIGEN_STATIC_ASSERT_VECTOR_ONLY(sparse_matrix_block_impl);
226 eigen_assert(Base::nonZeros()>0);
227 if(m_matrix.isCompressed())
228 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
230 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
233 EIGEN_STRONG_INLINE
Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
234 EIGEN_STRONG_INLINE
Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
236 inline const _MatrixTypeNested& nestedExpression()
const {
return m_matrix; }
237 Index startRow()
const {
return IsRowMajor ? m_outerStart : 0; }
238 Index startCol()
const {
return IsRowMajor ? 0 : m_outerStart; }
239 Index blockRows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
240 Index blockCols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
// Members; as above, the m_outerStart declaration itself is not visible in this
// extraction — TODO confirm against the original file.
244 typename SparseMatrixType::Nested m_matrix;
246 const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
// Thin public wrapper: an inner-panel block of a (non-const) SparseMatrix gets
// the writable sparse_matrix_block_impl behavior, including its splicing
// operator= (pulled in via `using Base::operator=`). Constructor bodies are
// elided by this extraction; they presumably just forward to Base — TODO confirm.
252 template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
253 class BlockImpl<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true,Sparse>
254 :
public internal::sparse_matrix_block_impl<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols>
257 typedef _StorageIndex StorageIndex;
258 typedef SparseMatrix<_Scalar, _Options, _StorageIndex> SparseMatrixType;
259 typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;
// Single outer-vector constructor (body elided in this extraction).
260 inline BlockImpl(SparseMatrixType& xpr, Index i)
// General constructor, forwarding the four block coordinates to Base.
264 inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
265 : Base(xpr, startRow, startCol, blockRows, blockCols)
268 using Base::operator=;
// Const counterpart of the wrapper above: an inner-panel block of a const
// SparseMatrix. Still inherits the compressed-storage interface from
// sparse_matrix_block_impl (instantiated with the const matrix type).
271 template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
272 class BlockImpl<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true,Sparse>
273 :
public internal::sparse_matrix_block_impl<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols>
276 typedef _StorageIndex StorageIndex;
277 typedef const SparseMatrix<_Scalar, _Options, _StorageIndex> SparseMatrixType;
278 typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;
// Single outer-vector constructor (body elided in this extraction).
279 inline BlockImpl(SparseMatrixType& xpr, Index i)
// General constructor, forwarding to Base.
283 inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
284 : Base(xpr, startRow, startCol, blockRows, blockCols)
287 using Base::operator=;
// Declared-but-undefined templated constructors — presumably placed here
// (likely in a private section, stripped by the extraction) to forbid
// constructing this block from arbitrary sparse expressions. TODO confirm.
289 template<
typename Derived> BlockImpl(
const SparseMatrixBase<Derived>& xpr, Index i);
290 template<
typename Derived> BlockImpl(
const SparseMatrixBase<Derived>& xpr);
// Fragments of SparseMatrixBase member definitions. Per the trailing
// cross-reference index of this extraction these correspond to
// innerVector(Index) (two overloads) and innerVectors(Index,Index) (two
// overloads); signatures and return statements are almost entirely elided, so
// only the argument lists building an inner-panel Block remain — the surviving
// lines map (outerStart, outerSize) onto (startRow, startCol, blockRows,
// blockCols) according to storage order. TODO confirm against the original.
298 template<
typename Derived>
305 template<
typename Derived>
312 template<
typename Derived>
317 IsRowMajor ? outerStart : 0, IsRowMajor ? 0 : outerStart,
318 IsRowMajor ? outerSize : rows(), IsRowMajor ? cols() : outerSize);
325 template<
typename Derived>
330 IsRowMajor ? outerStart : 0, IsRowMajor ? 0 : outerStart,
331 IsRowMajor ? outerSize : rows(), IsRowMajor ? cols() : outerSize);
// Generic sparse Block implementation for blocks that are NOT inner panels:
// stores all four coordinates (start row/col, block rows/cols) and offsets
// coefficient accesses accordingly. Inherits no_assignment_operator — this
// expression is read-structured; see EIGEN_INHERIT_ASSIGNMENT_OPERATORS below.
338 template<
typename XprType,
int BlockRows,
int BlockCols,
bool InnerPanel>
339 class BlockImpl<XprType,BlockRows,BlockCols,InnerPanel,
Sparse>
340 :
public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,InnerPanel> >, internal::no_assignment_operator
344 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
345 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
347 typedef typename internal::remove_all<typename XprType::Nested>::type _MatrixTypeNested;
// Single-index constructor initializers (the constructor's signature line is
// elided by this extraction): i selects a whole row when the block is
// 1 x full-width, or a whole column when it is full-height x 1.
353 m_startRow( (BlockRows==1) && (BlockCols==XprType::ColsAtCompileTime) ? convert_index(i) : 0),
354 m_startCol( (BlockRows==XprType::RowsAtCompileTime) && (BlockCols==1) ? convert_index(i) : 0),
355 m_blockRows(BlockRows==1 ? 1 : xpr.rows()),
356 m_blockCols(BlockCols==1 ? 1 : xpr.cols())
// General constructor: stores all four block coordinates.
361 inline BlockImpl(
const XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
362 : m_matrix(xpr), m_startRow(convert_index(startRow)), m_startCol(convert_index(startCol)), m_blockRows(convert_index(blockRows)), m_blockCols(convert_index(blockCols))
365 inline Index rows()
const {
return m_blockRows.value(); }
366 inline Index cols()
const {
return m_blockCols.value(); }
// Coefficient access: shift by the block origin and forward to the nested xpr.
368 inline Scalar& coeffRef(Index row, Index col)
370 return m_matrix.const_cast_derived()
371 .coeffRef(row + m_startRow.value(), col + m_startCol.value());
374 inline const Scalar coeff(Index row, Index col)
const
376 return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value());
// Linear access for vector blocks: index maps to the free dimension.
379 inline Scalar& coeffRef(Index index)
381 return m_matrix.const_cast_derived()
382 .coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
383 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
386 inline const Scalar coeff(Index index)
const
389 .coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
390 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
393 inline const _MatrixTypeNested& nestedExpression()
const {
return m_matrix; }
394 Index startRow()
const {
return m_startRow.value(); }
395 Index startCol()
const {
return m_startCol.value(); }
396 Index blockRows()
const {
return m_blockRows.value(); }
397 Index blockCols()
const {
return m_blockCols.value(); }
401 friend class ReverseInnerIterator;
402 friend struct internal::unary_evaluator<Block<XprType,BlockRows,BlockCols,InnerPanel>,
internal::IteratorBased, Scalar >;
// The nonzero count of an arbitrary sub-block cannot be read off the nested
// storage cheaply, so it is reported as unknown (Dynamic).
404 Index nonZeros() const {
return Dynamic; }
406 EIGEN_INHERIT_ASSIGNMENT_OPERATORS(BlockImpl)
// Start indices collapse to compile-time 0 when the corresponding dimension
// of the nested xpr is a single row/column.
408 typename XprType::Nested m_matrix;
409 const
internal::variable_if_dynamic<Index, XprType::RowsAtCompileTime == 1 ? 0 : Dynamic> m_startRow;
410 const
internal::variable_if_dynamic<Index, XprType::ColsAtCompileTime == 1 ? 0 : Dynamic> m_startCol;
411 const
internal::variable_if_dynamic<Index, RowsAtCompileTime> m_blockRows;
412 const
internal::variable_if_dynamic<Index, ColsAtCompileTime> m_blockCols;
// Iterator-based evaluator for a generic sparse Block. Picks between two inner
// iterator implementations: the cheap InnerVectorInnerIterator when the block
// follows the argument's storage order, and OuterVectorInnerIterator when the
// block is a single vector orthogonal to it (a column of a row-major arg, or a
// row of a column-major arg), which forces a scan across outer vectors.
418 template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
419 struct unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased >
420 :
public evaluator_base<Block<ArgType,BlockRows,BlockCols,InnerPanel> >
422 class InnerVectorInnerIterator;
423 class OuterVectorInnerIterator;
425 typedef Block<ArgType,BlockRows,BlockCols,InnerPanel> XprType;
426 typedef typename XprType::StorageIndex StorageIndex;
427 typedef typename XprType::Scalar Scalar;
429 class ReverseInnerIterator;
432 IsRowMajor = XprType::IsRowMajor,
// True when iteration must walk across the argument's outer vectors
// (the expression between these two lines is elided in this extraction).
434 OuterVector = (BlockCols==1 && ArgType::IsRowMajor)
437 (BlockRows==1 && !ArgType::IsRowMajor),
439 CoeffReadCost = evaluator<ArgType>::CoeffReadCost,
440 Flags = XprType::Flags
443 typedef typename internal::conditional<OuterVector,OuterVectorInnerIterator,InnerVectorInnerIterator>::type InnerIterator;
445 explicit unary_evaluator(
const XprType& op)
446 : m_argImpl(op.nestedExpression()), m_block(op)
// Estimate: use the block's own count when available; otherwise scale the
// argument's estimate by the block/argument size ratio. The branch between
// these two statements is elided in this extraction — TODO confirm.
449 inline Index nonZerosEstimate()
const {
450 Index nnz = m_block.nonZeros();
452 return m_argImpl.nonZerosEstimate() * m_block.size() / m_block.nestedExpression().size();
457 typedef typename evaluator<ArgType>::InnerIterator EvalIterator;
459 evaluator<ArgType> m_argImpl;
460 const XprType &m_block;
// Inner iterator for blocks aligned with the argument's storage order: wraps
// the argument's own InnerIterator on the corresponding outer vector, skips
// entries before the block's inner start, and stops at the block's inner end.
// Reported indices are shifted to be relative to the block origin.
463 template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
464 class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::InnerVectorInnerIterator
465 :
public EvalIterator
467 const XprType& m_block;
// Start on the argument's outer vector that corresponds to block-outer `outer`;
// m_end is the first inner index past the block.
471 EIGEN_STRONG_INLINE InnerVectorInnerIterator(
const unary_evaluator& aEval, Index outer)
472 : EvalIterator(aEval.m_argImpl, outer + (IsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol())),
473 m_block(aEval.m_block),
474 m_end(IsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows())
// Advance past stored entries that lie before the block's inner start.
476 while( (EvalIterator::operator
bool()) && (EvalIterator::index() < (IsRowMajor ? m_block.startCol() : m_block.startRow())) )
477 EvalIterator::operator++();
// All coordinates are reported relative to the block origin.
480 inline StorageIndex index()
const {
return EvalIterator::index() - convert_index<StorageIndex>(IsRowMajor ? m_block.startCol() : m_block.startRow()); }
481 inline Index outer()
const {
return EvalIterator::outer() - (IsRowMajor ? m_block.startRow() : m_block.startCol()); }
482 inline Index row()
const {
return EvalIterator::row() - m_block.startRow(); }
483 inline Index col()
const {
return EvalIterator::col() - m_block.startCol(); }
// Valid while the underlying iterator is valid AND inside the block's range.
485 inline operator bool()
const {
return EvalIterator::operator bool() && EvalIterator::index() < m_end; }
// Inner iterator for single-vector blocks orthogonal to the argument's storage
// order (e.g. one column of a row-major matrix). Each step scans the next outer
// vector of the argument for an entry at the fixed inner index m_innerIndex —
// inherently O(outer extent) per full traversal.
488 template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
489 class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::OuterVectorInnerIterator
491 const unary_evaluator& m_eval;
// m_outerPos starts one before the block so the first operator++ (presumably
// invoked from the constructor — call elided in this extraction) lands on the
// first candidate; only outer==0 is legal since the block is a single vector.
498 EIGEN_STRONG_INLINE OuterVectorInnerIterator(
const unary_evaluator& aEval, Index outer)
500 m_outerPos( (IsRowMajor ? aEval.m_block.startCol() : aEval.m_block.startRow()) - 1),
501 m_innerIndex(IsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol()),
503 m_end(IsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows())
505 EIGEN_UNUSED_VARIABLE(outer);
506 eigen_assert(outer==0);
// Position reported relative to the block origin.
511 inline StorageIndex index()
const {
return convert_index<StorageIndex>(m_outerPos - (IsRowMajor ? m_eval.m_block.startCol() : m_eval.m_block.startRow())); }
512 inline Index outer()
const {
return 0; }
513 inline Index row()
const {
return IsRowMajor ? 0 : index(); }
514 inline Index col()
const {
return IsRowMajor ? index() : 0; }
516 inline Scalar value()
const {
return m_value; }
// Advance: scan successive outer vectors of the argument until one contains a
// stored entry at m_innerIndex (cached into m_value) or the block is exhausted.
518 inline OuterVectorInnerIterator& operator++()
521 while(++m_outerPos<m_end)
523 EvalIterator it(m_eval.m_argImpl, m_outerPos);
// Linear skip to the fixed inner index within this outer vector.
525 while(it && it.index() < m_innerIndex) ++it;
526 if(it && it.index()==m_innerIndex)
528 m_value = it.value();
535 inline operator bool()
const {
return m_outerPos < m_end; }
// Evaluator for an inner-panel block of a (non-const) SparseMatrix: such a
// block exposes compressed storage directly, so it simply reuses the
// SparseCompressedBase evaluator instead of the generic iterators above.
538 template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
539 struct unary_evaluator<Block<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true>, IteratorBased>
540 : evaluator<SparseCompressedBase<Block<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true> > >
542 typedef Block<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,
true> XprType;
543 typedef evaluator<SparseCompressedBase<XprType> > Base;
544 explicit unary_evaluator(
const XprType &xpr) : Base(xpr) {}
// Same as the specialization above, for blocks of a const SparseMatrix.
547 template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
548 struct unary_evaluator<Block<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true>, IteratorBased>
549 : evaluator<SparseCompressedBase<Block<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true> > >
551 typedef Block<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,
true> XprType;
552 typedef evaluator<SparseCompressedBase<XprType> > Base;
553 explicit unary_evaluator(
const XprType &xpr) : Base(xpr) {}
561 #endif // EIGEN_SPARSE_BLOCK_H
// ---------------------------------------------------------------------------
// Cross-reference index emitted by the documentation extractor (leaked after
// the include guard's #endif). Preserved here as a comment so it cannot be
// mistaken for code:
//   Derived& derived()                                 — EigenBase.h:44
//   Eigen::Index Index — the interface type of indices — EigenBase.h:37
//   SparseMatrixBase — base class of any sparse matrices or sparse
//     expressions                                      — SparseMatrixBase.h:26
//   InnerVectorReturnType innerVector(Index outer)     — SparseBlock.h:299
//   Index cols() const                                 — SparseMatrixBase.h:154
//   (unnamed)                                          — Constants.h:485
//   BlockImpl(const XprType& xpr, Index i)             — SparseBlock.h:351
//   (unnamed)                                          — Eigen_Colamd.h:54
//   InnerVectorsReturnType innerVectors(Index outerStart, Index outerSize)
//                                                      — SparseBlock.h:314
//   Block — expression of a fixed-size or dynamic-size block — Block.h:104
//   BlockImpl(const XprType& xpr, Index startRow, Index startCol,
//             Index blockRows, Index blockCols)        — SparseBlock.h:361
//   Index rows() const                                 — SparseMatrixBase.h:152
// ---------------------------------------------------------------------------