#ifndef EIGEN_SPARSE_BLOCK_H
#define EIGEN_SPARSE_BLOCK_H
// Inner-panel block of a generic sparse expression: a read-only view over whole inner vectors
// (whole columns of a column-major expression, whole rows of a row-major one).
template<typename XprType, int BlockRows, int BlockCols>
class BlockImpl<XprType,BlockRows,BlockCols,true,Sparse>
  : public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,true> >
{
    enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
    enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
    // Single inner-vector constructor (row i in row-major, column i otherwise).
    inline BlockImpl(XprType& xpr, Index i)
      : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
    {}

    // Generic block constructor: only the outer start/size are kept, the inner range spans the whole inner dimension.
    inline BlockImpl(XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
    {}
    EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
    EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
    // Count the non-zeros of the block by iterating over its outer vectors.
    Index nonZeros() const
    {
      typedef internal::evaluator<XprType> EvaluatorType;
      EvaluatorType matEval(m_matrix);
      Index nnz = 0;
      Index end = m_outerStart + m_outerSize.value();
      for(Index j=m_outerStart; j<end; ++j)
        for(typename EvaluatorType::InnerIterator it(matEval, j); it; ++it)
          ++nnz;
      return nnz;
    }
    inline const Scalar coeff(Index row, Index col) const
    { return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart)); }

    inline const Scalar coeff(Index index) const
    { return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart); }
    Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
    Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
  protected:
    typename internal::ref_selector<XprType>::non_const_type m_matrix;
    Index m_outerStart;
    const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
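// Usage sketch (illustration only, not part of this header), assuming an Eigen 3.4-style API and
// user code that includes <Eigen/SparseCore>; the function name is a hypothetical example. An
// inner-panel block of a generic sparse expression is a cheap read-only view; coefficients are
// fetched through the nested expression.
//
//   #include <Eigen/SparseCore>
//
//   double middleColsSum(const Eigen::SparseMatrix<double>& A, const Eigen::SparseMatrix<double>& B)
//   {
//     // (A + B) is a sparse expression; middleCols() yields the read-only inner-panel block above,
//     // assuming A and B have at least 3 columns.
//     return (A + B).middleCols(1, 2).sum();
//   }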
// Specialization used for blocks of a SparseMatrix (lives in namespace internal): the block maps
// directly onto a range of the underlying compressed storage, so it is writable.
template<typename SparseMatrixType, int BlockRows, int BlockCols>
class sparse_matrix_block_impl
  : public SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> >
{
    typedef internal::remove_all_t<typename SparseMatrixType::Nested> MatrixTypeNested_;
    typedef Block<SparseMatrixType, BlockRows, BlockCols, true> BlockType;
    typedef SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> > Base;
    enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
    enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
    inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index i)
      : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
    {}

    inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
    {}
    // Assignment from a sparse expression: the block's range of the underlying compressed storage
    // is rewritten, and the tail of the storage is shifted when the number of non-zeros changes.
    template<typename OtherDerived>
    inline BlockType& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      typedef internal::remove_all_t<typename SparseMatrixType::Nested> NestedMatrixType_;
      NestedMatrixType_& matrix = m_matrix;
      // 1 - evaluate 'other' into a temporary with this block's storage order
      Ref<const SparseMatrix<Scalar, IsRowMajor ? RowMajor : ColMajor, StorageIndex> > tmp(other.derived());
      // 2 - check whether there is enough room for the new non-zeros
      Index nnz        = tmp.nonZeros();
      Index start      = m_outerStart==0 ? 0 : m_matrix.outerIndexPtr()[m_outerStart];  // starting position of the current block
      Index end        = m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()];    // ending position of the current block
      Index block_size = end - start;                                                   // room currently used by the block
      Index tail_size  = m_matrix.outerIndexPtr()[m_matrix.outerSize()] - end;

      Index free_size  = m_matrix.isCompressed()
                       ? Index(matrix.data().allocatedSize()) + block_size
                       : block_size;

      Index tmp_start = tmp.outerIndexPtr()[0];

      bool update_trailing_pointers = false;
      if(nnz > free_size)
      {
        // Not enough room: allocate a fresh buffer and stitch head + new block + tail together.
        typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);
        internal::smart_copy(m_matrix.valuePtr(),      m_matrix.valuePtr() + start,      newdata.valuePtr());
        internal::smart_copy(m_matrix.innerIndexPtr(), m_matrix.innerIndexPtr() + start, newdata.indexPtr());
        internal::smart_copy(tmp.valuePtr() + tmp_start,      tmp.valuePtr() + tmp_start + nnz,      newdata.valuePtr() + start);
        internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz, newdata.indexPtr() + start);
        internal::smart_copy(matrix.valuePtr() + end,      matrix.valuePtr() + end + tail_size,      newdata.valuePtr() + start + nnz);
        internal::smart_copy(matrix.innerIndexPtr() + end, matrix.innerIndexPtr() + end + tail_size, newdata.indexPtr() + start + nnz);
        newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);
        matrix.data().swap(newdata);
        update_trailing_pointers = true;
      }
      else
      {
        if(m_matrix.isCompressed() && nnz!=block_size)
        {
          // Enough room: move the tail to its final position, then copy the new block in place.
          matrix.data().resize(start + nnz + tail_size);
          internal::smart_memmove(matrix.valuePtr()+end,      matrix.valuePtr()+end + tail_size,      matrix.valuePtr()+start + nnz);
          internal::smart_memmove(matrix.innerIndexPtr()+end, matrix.innerIndexPtr()+end + tail_size, matrix.innerIndexPtr()+start + nnz);
          update_trailing_pointers = true;
        }
        internal::smart_copy(tmp.valuePtr() + tmp_start,      tmp.valuePtr() + tmp_start + nnz,      matrix.valuePtr() + start);
        internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz, matrix.innerIndexPtr() + start);
      }
      // 3 - update the outer index pointers and the innerNonZeros
      if(IsVectorAtCompileTime)
      {
        if(!m_matrix.isCompressed())
          matrix.innerNonZeroPtr()[m_outerStart] = StorageIndex(nnz);
        matrix.outerIndexPtr()[m_outerStart] = StorageIndex(start);
      }
      else
      {
        StorageIndex p = StorageIndex(start);
        for(Index k=0; k<m_outerSize.value(); ++k)
        {
          StorageIndex nnz_k = internal::convert_index<StorageIndex>(tmp.innerVector(k).nonZeros());
          if(!m_matrix.isCompressed())
            matrix.innerNonZeroPtr()[m_outerStart+k] = nnz_k;
          matrix.outerIndexPtr()[m_outerStart+k] = p;
          p += nnz_k;
        }
      }
      // 4 - shift the outer index pointers of the trailing outer vectors
      if(update_trailing_pointers)
      {
        StorageIndex offset = internal::convert_index<StorageIndex>(nnz - block_size);
        for(Index k = m_outerStart + m_outerSize.value(); k<=matrix.outerSize(); ++k)
          matrix.outerIndexPtr()[k] += offset;
      }

      return derived();
    }
    inline BlockType& operator=(const BlockType& other)
    {
      return operator=<BlockType>(other);
    }
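// Usage sketch (illustration only, not part of this header); the function name is a hypothetical
// example. Assigning a sparse expression to a column (or row) block of a SparseMatrix goes through
// the operator= above, which rewrites the corresponding range of the compressed storage and shifts
// the tail when the number of non-zeros changes.
//
//   #include <Eigen/SparseCore>
//
//   void overwriteColumns(Eigen::SparseMatrix<double>& A, const Eigen::SparseMatrix<double>& B)
//   {
//     // Replace columns 2..3 of A by the first two columns of B (assuming compatible sizes;
//     // SparseMatrix<double> is column-major by default, so these are inner-panel blocks).
//     A.middleCols(2, 2) = B.middleCols(0, 2);
//   }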
    // Direct access to the compressed storage of the nested SparseMatrix.
    inline const Scalar* valuePtr() const
    { return m_matrix.valuePtr(); }
    inline Scalar* valuePtr()
    { return m_matrix.valuePtr(); }

    inline const StorageIndex* innerIndexPtr() const
    { return m_matrix.innerIndexPtr(); }
    inline StorageIndex* innerIndexPtr()
    { return m_matrix.innerIndexPtr(); }

    // outerIndexPtr() is shifted so that entry 0 corresponds to the block's first outer vector.
    inline const StorageIndex* outerIndexPtr() const
    { return m_matrix.outerIndexPtr() + m_outerStart; }
    inline StorageIndex* outerIndexPtr()
    { return m_matrix.outerIndexPtr() + m_outerStart; }

    inline const StorageIndex* innerNonZeroPtr() const
    { return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr()+m_outerStart); }
    inline StorageIndex* innerNonZeroPtr()
    { return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr()+m_outerStart); }
    bool isCompressed() const { return m_matrix.innerNonZeroPtr()==0; }
    inline Scalar& coeffRef(Index row, Index col)
    { return m_matrix.coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart)); }

    inline const Scalar coeff(Index row, Index col) const
    { return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart)); }

    inline const Scalar coeff(Index index) const
    { return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart); }
    // Last stored coefficient of the block; only meaningful for single inner-vector blocks.
    const Scalar& lastCoeff() const
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(sparse_matrix_block_impl);
      if(m_matrix.isCompressed())
        return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
      else
        return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
    }
    EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
    EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
    inline const SparseMatrixType& nestedExpression() const { return m_matrix; }
    inline SparseMatrixType& nestedExpression() { return m_matrix; }
    Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
    Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
    typename internal::ref_selector<SparseMatrixType>::non_const_type m_matrix;
    Index m_outerStart;
    const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
};
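// Usage sketch (illustration only, not part of this header); the function name is a hypothetical
// example. A column block of a column-major SparseMatrix maps onto a contiguous range of the
// matrix's compressed storage, so valuePtr()/outerIndexPtr() of the block are the matrix's own
// arrays, with outerIndexPtr() shifted to the block's first column.
//
//   #include <Eigen/SparseCore>
//
//   double firstStoredValueOfColumn(const Eigen::SparseMatrix<double>& A, Eigen::Index j)
//   {
//     auto blk = A.middleCols(j, 1);                 // inner-panel block backed by sparse_matrix_block_impl
//     return blk.valuePtr()[blk.outerIndexPtr()[0]]; // assumes column j stores at least one entry
//   }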
// Block of a (non-const) SparseMatrix: public specialization forwarding to sparse_matrix_block_impl.
template<typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true,Sparse>
  : public internal::sparse_matrix_block_impl<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols>
{
public:
  typedef StorageIndex_ StorageIndex;
  typedef SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;

  inline BlockImpl(SparseMatrixType& xpr, Index i)
    : Base(xpr, i)
  {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
    : Base(xpr, startRow, startCol, blockRows, blockCols)
  {}

  using Base::operator=;
};
// Same specialization for blocks of a const SparseMatrix.
template<typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true,Sparse>
  : public internal::sparse_matrix_block_impl<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols>
{
public:
  typedef StorageIndex_ StorageIndex;
  typedef const SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;

  inline BlockImpl(SparseMatrixType& xpr, Index i)
    : Base(xpr, i)
  {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
    : Base(xpr, startRow, startCol, blockRows, blockCols)
  {}

  using Base::operator=;
};
// Generic implementation of a sparse block expression; read-only (whole-block assignment is
// disabled through internal::no_assignment_operator).
template<typename XprType, int BlockRows, int BlockCols, bool InnerPanel>
class BlockImpl<XprType,BlockRows,BlockCols,InnerPanel,Sparse>
  : public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,InnerPanel> >, internal::no_assignment_operator
{
    typedef Block<XprType, BlockRows, BlockCols, InnerPanel> BlockType;
  public:
    enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
    // Row/column constructor: the block is row i (when BlockRows==1) or column i (when BlockCols==1).
    inline BlockImpl(XprType& xpr, Index i)
      : m_matrix(xpr),
        m_startRow( (BlockRows==1) && (BlockCols==XprType::ColsAtCompileTime) ? convert_index(i) : 0),
        m_startCol( (BlockRows==XprType::RowsAtCompileTime) && (BlockCols==1) ? convert_index(i) : 0),
        m_blockRows(BlockRows==1 ? 1 : xpr.rows()),
        m_blockCols(BlockCols==1 ? 1 : xpr.cols())
    {}
    inline Index rows() const { return m_blockRows.value(); }
    inline Index cols() const { return m_blockCols.value(); }
    inline Scalar& coeffRef(Index row, Index col)
    { return m_matrix.coeffRef(row + m_startRow.value(), col + m_startCol.value()); }

    inline const Scalar coeff(Index row, Index col) const
    { return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value()); }

    inline Scalar& coeffRef(Index index)
    {
      return m_matrix.coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                               m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
    }

    inline const Scalar coeff(Index index) const
    {
      return m_matrix.coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                            m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
    }
    friend struct internal::unary_evaluator<Block<XprType,BlockRows,BlockCols,InnerPanel>, internal::IteratorBased, Scalar >;
    typename internal::ref_selector<XprType>::non_const_type m_matrix;
    const internal::variable_if_dynamic<Index, XprType::RowsAtCompileTime == 1 ? 0 : Dynamic> m_startRow;
    const internal::variable_if_dynamic<Index, XprType::ColsAtCompileTime == 1 ? 0 : Dynamic> m_startCol;
    const internal::variable_if_dynamic<Index, RowsAtCompileTime> m_blockRows;
    const internal::variable_if_dynamic<Index, ColsAtCompileTime> m_blockCols;
};
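// Usage sketch (illustration only, not part of this header); the function name is a hypothetical
// example. A block that does not cover full inner vectors (e.g. A.block(i,j,p,q) on a SparseMatrix)
// uses the read-only BlockImpl above; to obtain a standalone, modifiable copy, evaluate it into its
// own SparseMatrix.
//
//   #include <Eigen/SparseCore>
//
//   Eigen::SparseMatrix<double> topLeftCorner3x3(const Eigen::SparseMatrix<double>& A)
//   {
//     return Eigen::SparseMatrix<double>(A.block(0, 0, 3, 3)); // evaluates the block expression
//   }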
// The following evaluators live in namespace internal.
template<typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
struct unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased >
  : public evaluator_base<Block<ArgType,BlockRows,BlockCols,InnerPanel> >
{
    class InnerVectorInnerIterator;
    class OuterVectorInnerIterator;
  public:
    typedef Block<ArgType,BlockRows,BlockCols,InnerPanel> XprType;
    typedef typename XprType::StorageIndex StorageIndex;
    typedef typename XprType::Scalar Scalar;

    enum {
      IsRowMajor = XprType::IsRowMajor,
      // The block is an "outer vector" when it selects a single inner coefficient across all outer
      // vectors, e.g. one row of a column-major expression.
      OuterVector = (BlockCols == 1 && ArgType::IsRowMajor) || (BlockRows == 1 && !ArgType::IsRowMajor),
      CoeffReadCost = evaluator<ArgType>::CoeffReadCost,
      Flags = XprType::Flags
    };

    typedef std::conditional_t<OuterVector,OuterVectorInnerIterator,InnerVectorInnerIterator> InnerIterator;
    explicit unary_evaluator(const XprType& op)
      : m_argImpl(op.nestedExpression()), m_block(op)
    {}

    inline Index nonZerosEstimate() const {
      const Index nnz = m_block.nonZeros();
      if(nnz < 0) {
        // Unknown: scale the estimate of the nested expression by the relative block size.
        const Index nested_sz = m_block.nestedExpression().size();
        return nested_sz == 0 ? 0 : m_argImpl.nonZerosEstimate() * m_block.size() / nested_sz;
      }
      return nnz;
    }
    typedef typename evaluator<ArgType>::InnerIterator EvalIterator;

    evaluator<ArgType> m_argImpl;
    const XprType &m_block;
};
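// Usage sketch (illustration only, not part of this header); the function name is a hypothetical
// example. Iterating the non-zeros of a block: an inner-panel block of a SparseMatrix exposes the
// SparseCompressedBase iterator directly, while generic block expressions are driven by the
// InnerVectorInnerIterator/OuterVectorInnerIterator defined below.
//
//   #include <Eigen/SparseCore>
//
//   double sumMiddleCols(const Eigen::SparseMatrix<double>& A, Eigen::Index j, Eigen::Index n)
//   {
//     double s = 0;
//     auto blk = A.middleCols(j, n);
//     for (Eigen::Index k = 0; k < blk.outerSize(); ++k)
//       for (decltype(blk)::InnerIterator it(blk, k); it; ++it)
//         s += it.value();
//     return s;
//   }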
template<typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::InnerVectorInnerIterator
  : public EvalIterator
{
  // Cannot be named IsRowMajor: it would shadow unary_evaluator::IsRowMajor.
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const XprType& m_block;
  Index m_end;
public:

  EIGEN_STRONG_INLINE InnerVectorInnerIterator(const unary_evaluator& aEval, Index outer)
    : EvalIterator(aEval.m_argImpl, outer + (XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol())),
      m_block(aEval.m_block),
      m_end(XprIsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows())
  {
    // skip the entries located before the block's inner range
    while( (EvalIterator::operator bool()) && (EvalIterator::index() < (XprIsRowMajor ? m_block.startCol() : m_block.startRow())) )
      EvalIterator::operator++();
  }
  inline StorageIndex index() const { return EvalIterator::index() - convert_index<StorageIndex>(XprIsRowMajor ? m_block.startCol() : m_block.startRow()); }
  inline Index outer() const { return EvalIterator::outer() - (XprIsRowMajor ? m_block.startRow() : m_block.startCol()); }

  inline operator bool() const { return EvalIterator::operator bool() && EvalIterator::index() < m_end; }
};
template<typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::OuterVectorInnerIterator
{
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const unary_evaluator& m_eval;
  Index m_outerPos;
  const Index m_innerIndex;
  Index m_end;
  EvalIterator m_it;
public:

  EIGEN_STRONG_INLINE OuterVectorInnerIterator(const unary_evaluator& aEval, Index outer)
    : m_eval(aEval),
      m_outerPos( (XprIsRowMajor ? aEval.m_block.startCol() : aEval.m_block.startRow()) ),
      m_innerIndex(XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol()),
      m_end(XprIsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows()),
      m_it(m_eval.m_argImpl, m_outerPos)
  {
    while(m_it && m_it.index() < m_innerIndex) ++m_it;
    if((!m_it) || (m_it.index()!=m_innerIndex)) ++(*this); // nothing at m_innerIndex here: advance to the next outer vector that has an entry
  }
  inline StorageIndex index() const { return convert_index<StorageIndex>(m_outerPos - (XprIsRowMajor ? m_eval.m_block.startCol() : m_eval.m_block.startRow())); }
  inline Index outer() const { return 0; }
  inline Index row() const { return XprIsRowMajor ? 0 : index(); }
  inline Index col() const { return XprIsRowMajor ? index() : 0; }

  inline Scalar value() const { return m_it.value(); }
  inline Scalar& valueRef() { return m_it.valueRef(); }
  inline OuterVectorInnerIterator& operator++()
  {
    // search the next outer vector having a non-zero entry at inner index m_innerIndex
    while(++m_outerPos<m_end)
    {
      m_it.~EvalIterator();
      ::new (&m_it) EvalIterator(m_eval.m_argImpl, m_outerPos); // restart the iterator on the next outer vector
      while(m_it && m_it.index() < m_innerIndex) ++m_it;
      if(m_it && m_it.index()==m_innerIndex) break;
    }
    return *this;
  }

  inline operator bool() const { return m_outerPos < m_end; }
};
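// Usage sketch (illustration only, not part of this header); the function name is a hypothetical
// example. A single row of a column-major sparse expression is an "outer vector" block: the
// iterator above has to probe every column for an entry at the requested row, so such accesses
// are comparatively expensive.
//
//   #include <Eigen/SparseCore>
//
//   double rowSum(const Eigen::SparseMatrix<double>& A, Eigen::Index i) // column-major by default
//   {
//     return A.row(i).sum(); // internally driven by OuterVectorInnerIterator
//   }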
template<typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true>, IteratorBased>
  : evaluator<SparseCompressedBase<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true> > >
{
  typedef Block<SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType &xpr) : Base(xpr) {}
};
template<typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true>, IteratorBased>
  : evaluator<SparseCompressedBase<Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true> > >
{
  typedef Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>,BlockRows,BlockCols,true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType &xpr) : Base(xpr) {}
};

#endif // EIGEN_SPARSE_BLOCK_H