SparseCompressedBase.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

#include "./InternalHeaderCheck.h"

namespace Eigen {

template<typename Derived> class SparseCompressedBase;

namespace internal {

template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived>
{};

template <typename Derived, class Comp, bool IsVector>
struct inner_sort_impl;

} // end namespace internal

/** \ingroup SparseCore_Module
  * \class SparseCompressedBase
  * \brief Common base class for sparse [compressed]-{row|column}-storage format.
  */
template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
    using Base::operator=;
    using Base::IsRowMajor;

    class InnerIterator;
    class ReverseInnerIterator;

  protected:
    typedef typename Base::IndexVector IndexVector;
    Eigen::Map<IndexVector> innerNonZeros() { return Eigen::Map<IndexVector>(innerNonZeroPtr(), isCompressed() ? 0 : derived().outerSize()); }
    const Eigen::Map<const IndexVector> innerNonZeros() const { return Eigen::Map<const IndexVector>(innerNonZeroPtr(), isCompressed() ? 0 : derived().outerSize()); }

  public:

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const
    {
      if (Derived::IsVectorAtCompileTime && outerIndexPtr() == 0)
        return derived().nonZeros();
      else if (derived().outerSize() == 0)
        return 0;
      else if (isCompressed())
        return outerIndexPtr()[derived().outerSize()] - outerIndexPtr()[0];
      else
        return innerNonZeros().sum();
    }

    inline const Scalar* valuePtr() const { return derived().valuePtr(); }
    inline Scalar* valuePtr() { return derived().valuePtr(); }

    inline const StorageIndex* innerIndexPtr() const { return derived().innerIndexPtr(); }
    inline StorageIndex* innerIndexPtr() { return derived().innerIndexPtr(); }

    inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
    inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }

    inline const StorageIndex* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
    inline StorageIndex* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

    inline bool isCompressed() const { return innerNonZeroPtr()==0; }
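
    // Usage sketch (illustrative, not part of the original header): reading the raw
    // compressed (CSC) arrays exposed by the accessors above. `A` is a hypothetical
    // column-major SparseMatrix<double>, whose StorageIndex defaults to int.
    //
    //   Eigen::SparseMatrix<double> A(3, 3);
    //   A.insert(0, 0) = 1.0;
    //   A.insert(2, 1) = 2.0;
    //   A.makeCompressed();                       // ensure isCompressed() == true
    //   const double* vals  = A.valuePtr();       // nonZeros() stored coefficients
    //   const int*    inner = A.innerIndexPtr();  // row index of each stored value
    //   const int*    outer = A.outerIndexPtr();  // outerSize()+1 column start offsets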

    /** \returns a read-only view of the stored coefficients as a 1D array expression.
      * \warning this method is for compressed storage only, and it will trigger an assertion otherwise. */
    const Map<const Array<Scalar,Dynamic,1> > coeffs() const { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }

    /** \returns a read-write view of the stored coefficients as a 1D array expression.
      * \warning this method is for compressed storage only, and it will trigger an assertion otherwise. */
    Map<Array<Scalar,Dynamic,1> > coeffs() { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }
    /** Sorts the inner indices of the inner vectors in the range [begin,end) with respect to \c Comp. */
    template <class Comp = std::less<>>
    inline void sortInnerIndices(Index begin, Index end) {
      eigen_assert(begin >= 0 && end <= derived().outerSize() && end >= begin);
      internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
    }

    /** \returns the index of the first inner vector in the range [begin,end) that is not sorted
      * with respect to \c Comp, or \c end if they all are (for vector types: 1 if sorted, 0 otherwise). */
    template <class Comp = std::less<>>
    inline Index innerIndicesAreSorted(Index begin, Index end) const {
      eigen_assert(begin >= 0 && end <= derived().outerSize() && end >= begin);
      return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
    }

    /** Sorts the inner indices of all inner vectors with respect to \c Comp. */
    template <class Comp = std::less<>>
    inline void sortInnerIndices() {
      Index begin = 0;
      Index end = derived().outerSize();
      internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
    }

    /** \returns the index of the first inner vector that is not sorted with respect to \c Comp,
      * or \c outerSize() if they all are (for vector types: 1 if sorted, 0 otherwise). */
    template<class Comp = std::less<>>
    inline Index innerIndicesAreSorted() const {
      Index begin = 0;
      Index end = derived().outerSize();
      return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
    }
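
    // Usage sketch (illustrative, not part of the original header): re-sorting the inner
    // indices of each column and querying the result. `A` is a hypothetical SparseMatrix
    // and `buildSomeMatrix()` a hypothetical helper; std::greater<> is passed only to
    // demonstrate the Comp customization point.
    //
    //   Eigen::SparseMatrix<double> A = buildSomeMatrix();
    //   A.sortInnerIndices<std::greater<>>();        // each column sorted descending
    //   Eigen::Index i = A.innerIndicesAreSorted();  // with the default std::less<>: index of the
    //                                                // first unsorted column, or outerSize() if none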

  protected:
    /** Default constructor. Do nothing. */
    SparseCompressedBase() {}
    /** \internal \returns the lower-bound position of the coefficient (row,col) within its inner
      * vector (field \c value), and whether the coefficient is actually stored (field \c found). */
    internal::LowerBoundIndex lower_bound(Index row, Index col) const
    {
      eigen_internal_assert(row>=0 && row<this->rows() && col>=0 && col<this->cols());

      const Index outer = Derived::IsRowMajor ? row : col;
      const Index inner = Derived::IsRowMajor ? col : row;

      Index start = this->outerIndexPtr()[outer];
      Index end = this->isCompressed() ? this->outerIndexPtr()[outer+1] : this->outerIndexPtr()[outer] + this->innerNonZeroPtr()[outer];
      eigen_assert(end>=start && "you are using a non finalized sparse matrix or written coefficient does not exist");
      internal::LowerBoundIndex p;
      p.value = std::lower_bound(this->innerIndexPtr()+start, this->innerIndexPtr()+end, inner) - this->innerIndexPtr();
      p.found = (p.value<end) && (this->innerIndexPtr()[p.value]==inner);
      return p;
    }
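
    // Illustrative sketch of the lookup above (hypothetical numbers): for the default
    // column-major layout, lower_bound(row,col) scans column `col` for inner index `row`;
    // for a row-major matrix the roles of row and col are swapped. With the compressed
    // arrays outerIndexPtr() = {0,2,3} and innerIndexPtr() = {0,2,1}, a call to
    // lower_bound(2,0) binary-searches innerIndexPtr()[0..2) for the value 2 and
    // returns {value = 1, found = true}.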

    friend struct internal::evaluator<SparseCompressedBase<Derived> >;

  private:
    template<typename OtherDerived> explicit SparseCompressedBase(const SparseCompressedBase<OtherDerived>&);
};

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator()
      : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
    {}

    InnerIterator(const InnerIterator& other)
      : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
    {}

    InnerIterator& operator=(const InnerIterator& other)
    {
      m_values = other.m_values;
      m_indices = other.m_indices;
      const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
      m_id = other.m_id;
      m_end = other.m_end;
      return *this;
    }

    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit InnerIterator(const SparseCompressedBase& mat) : InnerIterator(mat, Index(0))
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived)
    }

    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived)
    }

    inline InnerIterator& operator++() { m_id++; return *this; }
    inline InnerIterator& operator+=(Index i) { m_id += i; return *this; }

    inline InnerIterator operator+(Index i)
    {
      InnerIterator result = *this;
      result += i;
      return result;
    }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
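
// Usage sketch (illustrative, not part of the original header): the canonical loop for
// visiting every stored coefficient of a compressed sparse expression. `A` is a
// hypothetical column-major SparseMatrix<double>.
//
//   for (Eigen::Index k = 0; k < A.outerSize(); ++k)
//     for (Eigen::SparseMatrix<double>::InnerIterator it(A, k); it; ++it)
//       std::cout << it.row() << "," << it.col() << " -> " << it.value() << "\n";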

template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived)
    }

    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived)
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }
    inline ReverseInnerIterator& operator-=(Index i) { m_id -= i; return *this; }

    inline ReverseInnerIterator operator-(Index i)
    {
      ReverseInnerIterator result = *this;
      result -= i;
      return result;
    }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_start;
    Index m_id;
};
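
// Usage sketch (illustrative, not part of the original header): walking one inner vector
// backwards. The iterator reads element m_id-1, so the last stored coefficient of outer
// vector `k` comes first. `A` is a hypothetical SparseMatrix<double>.
//
//   for (Eigen::SparseMatrix<double>::ReverseInnerIterator it(A, k); it; --it)
//     std::cout << it.index() << " -> " << it.value() << "\n";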

namespace internal {

// modified from https://artificial-mind.net/blog/2020/11/28/std-sort-multiple-ranges

template <typename Scalar, typename StorageIndex>
class StorageVal;
template <typename Scalar, typename StorageIndex>
class StorageRef;
template <typename Scalar, typename StorageIndex>
class CompressedStorageIterator;

// class to hold an index/value pair
template <typename Scalar, typename StorageIndex>
class StorageVal
{
public:

  StorageVal(const StorageIndex& innerIndex, const Scalar& value) : m_innerIndex(innerIndex), m_value(value) {}
  StorageVal(const StorageVal& other) : m_innerIndex(other.m_innerIndex), m_value(other.m_value) {}
  StorageVal(StorageVal&& other) = default;

  inline const StorageIndex& key() const { return m_innerIndex; }
  inline StorageIndex& key() { return m_innerIndex; }
  inline const Scalar& value() const { return m_value; }
  inline Scalar& value() { return m_value; }

  // enables StorageVal to be compared with respect to any type that is convertible to StorageIndex
  inline operator StorageIndex() const { return m_innerIndex; }

protected:
  StorageIndex m_innerIndex;
  Scalar m_value;
private:
  StorageVal() = delete;
};
// class to hold an index/value iterator pair
// used to define assignment, swap, and comparison operators for CompressedStorageIterator
template <typename Scalar, typename StorageIndex>
class StorageRef
{
public:
  using value_type = StorageVal<Scalar, StorageIndex>;

  // StorageRef needs to be move-able for sort on macOS.
  StorageRef(StorageRef&& other) = default;

  inline StorageRef& operator=(const StorageRef& other) {
    key() = other.key();
    value() = other.value();
    return *this;
  }
  inline StorageRef& operator=(const value_type& other) {
    key() = other.key();
    value() = other.value();
    return *this;
  }
  inline operator value_type() const { return value_type(key(), value()); }
  inline friend void swap(const StorageRef& a, const StorageRef& b) {
    std::iter_swap(a.keyPtr(), b.keyPtr());
    std::iter_swap(a.valuePtr(), b.valuePtr());
  }

  inline const StorageIndex& key() const { return *m_innerIndexIterator; }
  inline StorageIndex& key() { return *m_innerIndexIterator; }
  inline const Scalar& value() const { return *m_valueIterator; }
  inline Scalar& value() { return *m_valueIterator; }
  inline StorageIndex* keyPtr() const { return m_innerIndexIterator; }
  inline Scalar* valuePtr() const { return m_valueIterator; }

  // enables StorageRef to be compared with respect to any type that is convertible to StorageIndex
  inline operator StorageIndex() const { return *m_innerIndexIterator; }

protected:
  StorageIndex* m_innerIndexIterator;
  Scalar* m_valueIterator;
private:
  StorageRef() = delete;
  // these constructors are called by the CompressedStorageIterator constructors for convenience only
  StorageRef(StorageIndex* innerIndexIterator, Scalar* valueIterator) : m_innerIndexIterator(innerIndexIterator), m_valueIterator(valueIterator) {}
  StorageRef(const StorageRef& other) : m_innerIndexIterator(other.m_innerIndexIterator), m_valueIterator(other.m_valueIterator) {}

  friend class CompressedStorageIterator<Scalar, StorageIndex>;
};

// STL-compatible iterator class that operates on inner indices and values
template<typename Scalar, typename StorageIndex>
class CompressedStorageIterator
{
public:
  using iterator_category = std::random_access_iterator_tag;
  using reference = StorageRef<Scalar, StorageIndex>;
  using difference_type = Index;
  using value_type = typename reference::value_type;
  using pointer = value_type*;

  CompressedStorageIterator() = delete;
  CompressedStorageIterator(difference_type index, StorageIndex* innerIndexPtr, Scalar* valuePtr) : m_index(index), m_data(innerIndexPtr, valuePtr) {}
  CompressedStorageIterator(difference_type index, reference data) : m_index(index), m_data(data) {}
  CompressedStorageIterator(const CompressedStorageIterator& other) : m_index(other.m_index), m_data(other.m_data) {}
  CompressedStorageIterator(CompressedStorageIterator&& other) = default;
  inline CompressedStorageIterator& operator=(const CompressedStorageIterator& other) {
    m_index = other.m_index;
    m_data = other.m_data;
    return *this;
  }

  inline CompressedStorageIterator operator+(difference_type offset) const { return CompressedStorageIterator(m_index + offset, m_data); }
  inline CompressedStorageIterator operator-(difference_type offset) const { return CompressedStorageIterator(m_index - offset, m_data); }
  inline difference_type operator-(const CompressedStorageIterator& other) const { return m_index - other.m_index; }
  inline CompressedStorageIterator& operator++() { ++m_index; return *this; }
  inline CompressedStorageIterator& operator--() { --m_index; return *this; }
  inline CompressedStorageIterator& operator+=(difference_type offset) { m_index += offset; return *this; }
  inline CompressedStorageIterator& operator-=(difference_type offset) { m_index -= offset; return *this; }
  inline reference operator*() const { return reference(m_data.keyPtr() + m_index, m_data.valuePtr() + m_index); }

  #define MAKE_COMP(OP) inline bool operator OP(const CompressedStorageIterator& other) const { return m_index OP other.m_index; }
  MAKE_COMP(<)
  MAKE_COMP(>)
  MAKE_COMP(>=)
  MAKE_COMP(<=)
  MAKE_COMP(!=)
  MAKE_COMP(==)
  #undef MAKE_COMP

protected:
  difference_type m_index;
  reference m_data;
};
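
// Illustrative sketch (not part of the original header): CompressedStorageIterator lets
// std::sort permute a pair of parallel index/value arrays in lock-step, which is exactly
// how inner_sort_impl below sorts one inner vector. The two small arrays are hypothetical
// stand-ins for a slice of innerIndexPtr()/valuePtr().
//
//   int    idx[] = {4, 1, 3};
//   double val[] = {40.0, 10.0, 30.0};
//   CompressedStorageIterator<double,int> first(0, idx, val);
//   CompressedStorageIterator<double,int> last (3, idx, val);
//   std::sort(first, last, std::less<>());  // idx becomes {1,3,4}, val becomes {10,30,40}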

template <typename Derived, class Comp, bool IsVector>
struct inner_sort_impl {
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::StorageIndex StorageIndex;
  static inline void run(SparseCompressedBase<Derived>& obj, Index begin, Index end) {
    const bool is_compressed = obj.isCompressed();
    for (Index outer = begin; outer < end; outer++) {
      Index begin_offset = obj.outerIndexPtr()[outer];
      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
      CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
      CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
      std::sort(begin_it, end_it, Comp());
    }
  }
  static inline Index check(const SparseCompressedBase<Derived>& obj, Index begin, Index end) {
    const bool is_compressed = obj.isCompressed();
    for (Index outer = begin; outer < end; outer++) {
      Index begin_offset = obj.outerIndexPtr()[outer];
      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
      const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
      const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
      bool is_sorted = std::is_sorted(begin_it, end_it, Comp());
      if (!is_sorted) return outer;
    }
    return end;
  }
};
template <typename Derived, class Comp>
struct inner_sort_impl<Derived, Comp, true> {
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::StorageIndex StorageIndex;
  static inline void run(SparseCompressedBase<Derived>& obj, Index, Index) {
    Index begin_offset = 0;
    Index end_offset = obj.nonZeros();
    CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
    CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
    std::sort(begin_it, end_it, Comp());
  }
  static inline Index check(const SparseCompressedBase<Derived>& obj, Index, Index) {
    Index begin_offset = 0;
    Index end_offset = obj.nonZeros();
    const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
    const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
    return std::is_sorted(begin_it, end_it, Comp()) ? 1 : 0;
  }
};

template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  const Scalar& coeff(Index row, Index col) const
  {
    Index p = find(row,col);

    if(p==Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col)
  {
    Index p = find(row,col);
    eigen_assert(p!=Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

protected:

  Index find(Index row, Index col) const
  {
    internal::LowerBoundIndex p = m_matrix->lower_bound(row,col);
    return p.found ? p.value : Dynamic;
  }

  const Derived *m_matrix;
  const Scalar m_zero;
};
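
// Illustrative sketch (not part of the original header) of the behaviour implemented by
// coeff()/coeffRef() above, as observed through a hypothetical 3x3 SparseMatrix<double>:
//
//   Eigen::SparseMatrix<double> A(3, 3);
//   A.insert(1, 2) = 5.0;
//   double a = A.coeff(1, 2);  // 5.0: the coefficient is stored and found
//   double b = A.coeff(0, 0);  // 0.0: entry absent; the evaluator path above returns m_zero
//   // Writing through an expression to a coefficient that was never inserted trips the
//   // "written coefficient does not exist" assertion in coeffRef() above.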

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H