#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H
template<typename Derived> class SparseCompressedBase;
template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived> {};
template <typename Derived, class Comp, bool IsVector>
struct inner_sort_impl;
template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    using Base::operator=;

    class InnerIterator;
    class ReverseInnerIterator;
    // Sort the inner indices of each inner vector in the outer index range [begin,end).
    template <class Comp = std::less<>>
    inline void sortInnerIndices(Index begin, Index end) {
      internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
    }
    // Check that the inner indices in the outer index range [begin,end) are sorted;
    // see inner_sort_impl::check below for the exact return value.
    template <class Comp = std::less<>>
    inline Index innerIndicesAreSorted(Index begin, Index end) const {
      return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
    }
    // Sort the inner indices of every inner vector.
    template <class Comp = std::less<>>
    inline void sortInnerIndices() {
      Index begin = 0;
      Index end = derived().outerSize();
      internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
    }
    // Check that the inner indices of every inner vector are sorted.
    template<class Comp = std::less<>>
    inline Index innerIndicesAreSorted() const {
      Index begin = 0;
      Index end = derived().outerSize();
      return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
    }
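    // Usage sketch (illustrative only, not part of this header): sorting and then
    // re-checking the inner indices of a sparse matrix. The matrix name 'A' and its
    // contents are assumed for the example; the calls themselves are the members
    // declared above.
    //
    //   Eigen::SparseMatrix<double> A(rows, cols);
    //   // ... fill A, e.g. via insert(), possibly leaving inner indices unsorted ...
    //   A.sortInnerIndices();        // sort every inner vector
    //   A.innerIndicesAreSorted();   // re-check; see inner_sort_impl below for the return value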
    // Locate the storage position of coefficient (row,col); the result carries both
    // the position and a flag telling whether the coefficient is actually stored.
    internal::LowerBoundIndex lower_bound(Index row, Index col) const
    {
      const Index outer = Derived::IsRowMajor ? row : col;
      const Index inner = Derived::IsRowMajor ? col : row;

      const Index start = outerIndexPtr()[outer];
      const Index end = isCompressed() ? outerIndexPtr()[outer+1]
                                       : outerIndexPtr()[outer] + innerNonZeroPtr()[outer];
      eigen_assert(end>=start && "you are using a non finalized sparse matrix or written coefficient does not exist");
      internal::LowerBoundIndex p;
      p.value = std::lower_bound(innerIndexPtr()+start, innerIndexPtr()+end, inner) - innerIndexPtr();
      p.found = (p.value < end) && (innerIndexPtr()[p.value] == inner);
      return p;
    }
};
template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator()
      : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
    {}

    InnerIterator(const InnerIterator& other)
      : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
    {}
    InnerIterator& operator=(const InnerIterator& other)
    {
      m_values = other.m_values;
      m_indices = other.m_indices;
      // m_outer is declared const, so it has to be updated through setValue().
      const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
      m_id = other.m_id;
      m_end = other.m_end;
      return *this;
    }
    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        // compile-time vector without an outer index array: iterate over all non-zeros
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }
    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }
    inline InnerIterator& operator++() { m_id++; return *this; }
    inline InnerIterator operator+(Index i)
    {
      InnerIterator result = *this;
      result.m_id += i;
      return result;
    }
    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }
  protected:
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
  private:
    // Intentionally left undefined: catches attempts to construct an InnerIterator
    // directly from a generic SparseMatrixBase expression.
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
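// Usage sketch (illustrative, not part of this header): the standard way to walk
// the non-zeros of one inner vector through this iterator. 'A' and the outer
// index 'k' are assumed names; for the default column-major storage, k is a column.
//
//   for (Eigen::SparseMatrix<double>::InnerIterator it(A, k); it; ++it) {
//     it.value();   // stored coefficient
//     it.index();   // inner index (the row index for column-major storage)
//     it.outer();   // outer index (== k)
//   }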
template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        // compile-time vector without an outer index array: start past the last non-zero
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }
    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }
    inline ReverseInnerIterator& operator--() { --m_id; return *this; }
    inline ReverseInnerIterator operator-(Index i)
    {
      ReverseInnerIterator result = *this;
      result.m_id -= i;
      return result;
    }
    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }
  protected:
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
};
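// Usage sketch (illustrative, not part of this header): visiting the non-zeros of
// inner vector 'k' from last to first. 'A' and 'k' are assumed names.
//
//   for (Eigen::SparseMatrix<double>::ReverseInnerIterator it(A, k); it; --it) {
//     it.value();   // stored coefficient
//     it.index();   // inner index
//   }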
template <typename Scalar, typename StorageIndex>
class StorageVal;
template <typename Scalar, typename StorageIndex>
class StorageRef;
template <typename Scalar, typename StorageIndex>
class CompressedStorageIterator;
// Wrapper owning an (inner index, value) pair; used as the value_type of
// CompressedStorageIterator.
template <typename Scalar, typename StorageIndex>
class StorageVal
{
  public:
    StorageVal(const StorageIndex& innerIndex, const Scalar& value) : m_innerIndex(innerIndex), m_value(value) {}
    StorageVal(const StorageVal& other) : m_innerIndex(other.m_innerIndex), m_value(other.m_value) {}
    StorageVal(StorageVal&& other) = default;

    inline const StorageIndex& key() const { return m_innerIndex; }
    inline StorageIndex& key() { return m_innerIndex; }
    inline const Scalar& value() const { return m_value; }
    inline Scalar& value() { return m_value; }

    // Lets a StorageVal be compared directly against anything convertible to StorageIndex.
    inline operator StorageIndex() const { return m_innerIndex; }

  protected:
    StorageIndex m_innerIndex;
    Scalar m_value;

  private:
    StorageVal() = delete;
};
// Proxy reference to an (inner index, value) pair living in the parallel
// index/value arrays of the compressed storage.
template <typename Scalar, typename StorageIndex>
class StorageRef
{
  public:
    using value_type = StorageVal<Scalar, StorageIndex>;

    StorageRef(StorageRef&& other) = default;

    inline StorageRef& operator=(const StorageRef& other) {
      key() = other.key();
      value() = other.value();
      return *this;
    }
    inline StorageRef& operator=(const value_type& other) {
      key() = other.key();
      value() = other.value();
      return *this;
    }
    inline operator value_type() const { return value_type(key(), value()); }
    // swap the pointed-to index and value, not the reference wrappers themselves
    inline friend void swap(const StorageRef& a, const StorageRef& b) {
      std::iter_swap(a.keyPtr(), b.keyPtr());
      std::iter_swap(a.valuePtr(), b.valuePtr());
    }

    inline const StorageIndex& key() const { return *m_innerIndexIterator; }
    inline StorageIndex& key() { return *m_innerIndexIterator; }
    inline const Scalar& value() const { return *m_valueIterator; }
    inline Scalar& value() { return *m_valueIterator; }
    inline StorageIndex* keyPtr() const { return m_innerIndexIterator; }
    inline Scalar* valuePtr() const { return m_valueIterator; }

    // Lets a StorageRef be compared directly against anything convertible to StorageIndex.
    inline operator StorageIndex() const { return *m_innerIndexIterator; }

  protected:
    StorageIndex* m_innerIndexIterator;
    Scalar* m_valueIterator;

  private:
    StorageRef() = delete;
    // These constructors are only used by CompressedStorageIterator.
    StorageRef(StorageIndex* innerIndexIterator, Scalar* valueIterator) : m_innerIndexIterator(innerIndexIterator), m_valueIterator(valueIterator) {}
    StorageRef(const StorageRef& other) : m_innerIndexIterator(other.m_innerIndexIterator), m_valueIterator(other.m_valueIterator) {}

    friend class CompressedStorageIterator<Scalar, StorageIndex>;
};
// Random-access iterator over the parallel inner-index and value arrays of the
// compressed storage, so that standard algorithms such as std::sort reorder both
// arrays in lockstep.
template<typename Scalar, typename StorageIndex>
class CompressedStorageIterator
{
  public:
    using iterator_category = std::random_access_iterator_tag;
    using reference = StorageRef<Scalar, StorageIndex>;
    using difference_type = Index;
    using value_type = typename reference::value_type;
    using pointer = value_type*;

    CompressedStorageIterator() = delete;
    CompressedStorageIterator(difference_type index, StorageIndex* innerIndexPtr, Scalar* valuePtr) : m_index(index), m_data(innerIndexPtr, valuePtr) {}
    CompressedStorageIterator(difference_type index, reference data) : m_index(index), m_data(data) {}
    CompressedStorageIterator(const CompressedStorageIterator& other) : m_index(other.m_index), m_data(other.m_data) {}
    CompressedStorageIterator(CompressedStorageIterator&& other) = default;
    inline CompressedStorageIterator& operator=(const CompressedStorageIterator& other) {
      m_index = other.m_index;
      m_data = other.m_data;
      return *this;
    }

    inline CompressedStorageIterator operator+(difference_type offset) const { return CompressedStorageIterator(m_index + offset, m_data); }
    inline CompressedStorageIterator operator-(difference_type offset) const { return CompressedStorageIterator(m_index - offset, m_data); }
    inline difference_type operator-(const CompressedStorageIterator& other) const { return m_index - other.m_index; }
    inline CompressedStorageIterator& operator++() { ++m_index; return *this; }
    inline CompressedStorageIterator& operator--() { --m_index; return *this; }
    inline CompressedStorageIterator& operator+=(difference_type offset) { m_index += offset; return *this; }
    inline CompressedStorageIterator& operator-=(difference_type offset) { m_index -= offset; return *this; }
    inline reference operator*() const { return reference(m_data.keyPtr() + m_index, m_data.valuePtr() + m_index); }

#define MAKE_COMP(OP) inline bool operator OP(const CompressedStorageIterator& other) const { return m_index OP other.m_index; }
    MAKE_COMP(<)
    MAKE_COMP(>)
    MAKE_COMP(>=)
    MAKE_COMP(<=)
    MAKE_COMP(==)
    MAKE_COMP(!=)
#undef MAKE_COMP

  protected:
    difference_type m_index;
    reference m_data;
};
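// Sketch of the intent (assumed, standalone example; not part of this header):
// because dereferencing yields a StorageRef proxy whose swap() exchanges the
// pointed-to index and value, a standard sort permutes both parallel arrays in
// lockstep. The array names below are illustrative.
//
//   StorageIndex idx[] = {3, 0, 2};
//   Scalar       val[] = {30, 0, 20};
//   CompressedStorageIterator<Scalar, StorageIndex> first(0, idx, val);
//   CompressedStorageIterator<Scalar, StorageIndex> last(3, idx, val);
//   std::sort(first, last, std::less<>());   // idx -> {0,2,3}, val -> {0,20,30}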
// Sorts or checks each inner vector in the outer index range [begin,end).
template <typename Derived, class Comp, bool IsVector>
struct inner_sort_impl {
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::StorageIndex StorageIndex;
  static inline void run(SparseCompressedBase<Derived>& obj, Index begin, Index end) {
    const bool is_compressed = obj.isCompressed();
    for (Index outer = begin; outer < end; outer++) {
      Index begin_offset = obj.outerIndexPtr()[outer];
      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
      CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
      CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
      // sort the index and value arrays of this inner vector in lockstep
      std::sort(begin_it, end_it, Comp());
    }
  }
  static inline Index check(const SparseCompressedBase<Derived>& obj, Index begin, Index end) {
    const bool is_compressed = obj.isCompressed();
    for (Index outer = begin; outer < end; outer++) {
      Index begin_offset = obj.outerIndexPtr()[outer];
      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
      const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
      const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
      bool is_sorted = std::is_sorted(begin_it, end_it, Comp());
      // report the first outer index whose inner vector is out of order
      if (!is_sorted) return outer;
    }
    return end;
  }
};
// Specialization for compile-time vectors: there is a single inner vector, so the
// [begin,end) outer range is ignored; check() returns 1 if sorted and 0 otherwise.
template <typename Derived, class Comp>
struct inner_sort_impl<Derived, Comp, true> {
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::StorageIndex StorageIndex;
  static inline void run(SparseCompressedBase<Derived>& obj, Index, Index) {
    Index begin_offset = 0;
    Index end_offset = obj.nonZeros();
    CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
    CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
    std::sort(begin_it, end_it, Comp());
  }
  static inline Index check(const SparseCompressedBase<Derived>& obj, Index, Index) {
    Index begin_offset = 0;
    Index end_offset = obj.nonZeros();
    const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
    const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
    return std::is_sorted(begin_it, end_it, Comp()) ? 1 : 0;
  }
};
template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  // Coefficient access: locate (row,col) in the compressed storage; a structural
  // zero yields a reference to m_zero and nothing is inserted.
  const Scalar& coeff(Index row, Index col) const {
    Index p = find(row, col);
    if (p == Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col) {
    Index p = find(row, col);
    eigen_assert(p != Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

 protected:
  // storage position of (row,col), or Dynamic if the coefficient is not stored
  Index find(Index row, Index col) const {
    internal::LowerBoundIndex p = m_matrix->lower_bound(row, col);
    return p.found ? p.value : Dynamic;
  }

  const Derived *m_matrix;
  const Scalar m_zero;
};
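// Read-path sketch (assumed names, not part of this header): coefficient access on
// a sparse matrix goes through this evaluator; a structural zero returns m_zero and
// no new entry is created.
//
//   Eigen::SparseMatrix<double> A(4, 4);
//   A.insert(1, 2) = 5.0;
//   double x = A.coeff(1, 2);   // 5.0, located via lower_bound()/find()
//   double y = A.coeff(0, 0);   // 0.0, structural zero

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H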