Redux.h
Go to the documentation of this file.
1 // This file is part of Eigen, a lightweight C++ template library
2 // for linear algebra.
3 //
4 // Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
5 // Copyright (C) 2006-2008 Benoit Jacob <jacob.benoit.1@gmail.com>
6 //
7 // This Source Code Form is subject to the terms of the Mozilla
8 // Public License v. 2.0. If a copy of the MPL was not distributed
9 // with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
10 
11 #ifndef EIGEN_REDUX_H
12 #define EIGEN_REDUX_H
13 
14 #include "./InternalHeaderCheck.h"
15 
16 namespace Eigen {
17 
18 namespace internal {
19 
20 // TODO
21 // * implement other kind of vectorization
22 // * factorize code
23 
24 
28 template<typename Func, typename Evaluator>
29 struct redux_traits
30 {
31 public:
32  typedef typename find_best_packet<typename Evaluator::Scalar,Evaluator::SizeAtCompileTime>::type PacketType;
33  enum {
35  InnerMaxSize = int(Evaluator::IsRowMajor)
36  ? Evaluator::MaxColsAtCompileTime
37  : Evaluator::MaxRowsAtCompileTime,
38  OuterMaxSize = int(Evaluator::IsRowMajor)
39  ? Evaluator::MaxRowsAtCompileTime
40  : Evaluator::MaxColsAtCompileTime,
41  SliceVectorizedWork = int(InnerMaxSize)==Dynamic ? Dynamic
42  : int(OuterMaxSize)==Dynamic ? (int(InnerMaxSize)>=int(PacketSize) ? Dynamic : 0)
43  : (int(InnerMaxSize)/int(PacketSize)) * int(OuterMaxSize)
44  };
45 
46  enum {
47  MayLinearize = (int(Evaluator::Flags) & LinearAccessBit),
48  MightVectorize = (int(Evaluator::Flags)&ActualPacketAccessBit)
49  && (functor_traits<Func>::PacketAccess),
50  MayLinearVectorize = bool(MightVectorize) && bool(MayLinearize),
51  MaySliceVectorize = bool(MightVectorize) && (int(SliceVectorizedWork)==Dynamic || int(SliceVectorizedWork)>=3)
52  };
53 
54 public:
55  enum {
56  Traversal = int(MayLinearVectorize) ? int(LinearVectorizedTraversal)
57  : int(MaySliceVectorize) ? int(SliceVectorizedTraversal)
58  : int(MayLinearize) ? int(LinearTraversal)
59  : int(DefaultTraversal)
60  };
61 
62 public:
63  enum {
64  Cost = Evaluator::SizeAtCompileTime == Dynamic ? HugeCost
65  : int(Evaluator::SizeAtCompileTime) * int(Evaluator::CoeffReadCost) + (Evaluator::SizeAtCompileTime-1) * functor_traits<Func>::Cost,
66  UnrollingLimit = EIGEN_UNROLLING_LIMIT * (int(Traversal) == int(DefaultTraversal) ? 1 : int(PacketSize))
67  };
68 
69 public:
70  enum {
71  Unrolling = Cost <= UnrollingLimit ? CompleteUnrolling : NoUnrolling
72  };
73 
74 #ifdef EIGEN_DEBUG_ASSIGN
75  static void debug()
76  {
77  std::cerr << "Xpr: " << typeid(typename Evaluator::XprType).name() << std::endl;
78  std::cerr.setf(std::ios::hex, std::ios::basefield);
79  EIGEN_DEBUG_VAR(Evaluator::Flags)
80  std::cerr.unsetf(std::ios::hex);
81  EIGEN_DEBUG_VAR(InnerMaxSize)
82  EIGEN_DEBUG_VAR(OuterMaxSize)
83  EIGEN_DEBUG_VAR(SliceVectorizedWork)
84  EIGEN_DEBUG_VAR(PacketSize)
85  EIGEN_DEBUG_VAR(MightVectorize)
86  EIGEN_DEBUG_VAR(MayLinearVectorize)
87  EIGEN_DEBUG_VAR(MaySliceVectorize)
88  std::cerr << "Traversal" << " = " << Traversal << " (" << demangle_traversal(Traversal) << ")" << std::endl;
89  EIGEN_DEBUG_VAR(UnrollingLimit)
90  std::cerr << "Unrolling" << " = " << Unrolling << " (" << demangle_unrolling(Unrolling) << ")" << std::endl;
91  std::cerr << std::endl;
92  }
93 #endif
94 };
95 
96 
100 
102 template<typename Func, typename Evaluator, Index Start, Index Length>
103 struct redux_novec_unroller
104 {
105  static constexpr Index HalfLength = Length/2;
106 
107  typedef typename Evaluator::Scalar Scalar;
108 
110  static EIGEN_STRONG_INLINE Scalar run(const Evaluator &eval, const Func& func)
111  {
112  return func(redux_novec_unroller<Func, Evaluator, Start, HalfLength>::run(eval,func),
113  redux_novec_unroller<Func, Evaluator, Start+HalfLength, Length-HalfLength>::run(eval,func));
114  }
115 };
116 
117 template<typename Func, typename Evaluator, Index Start>
118 struct redux_novec_unroller<Func, Evaluator, Start, 1>
119 {
120  static constexpr Index outer = Start / Evaluator::InnerSizeAtCompileTime;
121  static constexpr Index inner = Start % Evaluator::InnerSizeAtCompileTime;
122 
123  typedef typename Evaluator::Scalar Scalar;
124 
126  static EIGEN_STRONG_INLINE Scalar run(const Evaluator &eval, const Func&)
127  {
128  return eval.coeffByOuterInner(outer, inner);
129  }
130 };
131 
132 // This is actually dead code and will never be called. It is required
133 // to prevent false warnings regarding failed inlining though
134 // for 0 length run() will never be called at all.
135 template<typename Func, typename Evaluator, Index Start>
136 struct redux_novec_unroller<Func, Evaluator, Start, 0>
137 {
138  typedef typename Evaluator::Scalar Scalar;
140  static EIGEN_STRONG_INLINE Scalar run(const Evaluator&, const Func&) { return Scalar(); }
141 };
142 
143 template<typename Func, typename Evaluator, Index Start, Index Length>
144 struct redux_novec_linear_unroller
145 {
146  static constexpr Index HalfLength = Length/2;
147 
148  typedef typename Evaluator::Scalar Scalar;
149 
151  static EIGEN_STRONG_INLINE Scalar run(const Evaluator &eval, const Func& func)
152  {
153  return func(redux_novec_linear_unroller<Func, Evaluator, Start, HalfLength>::run(eval,func),
154  redux_novec_linear_unroller<Func, Evaluator, Start+HalfLength, Length-HalfLength>::run(eval,func));
155  }
156 };
157 
158 template<typename Func, typename Evaluator, Index Start>
159 struct redux_novec_linear_unroller<Func, Evaluator, Start, 1>
160 {
161  typedef typename Evaluator::Scalar Scalar;
162 
164  static EIGEN_STRONG_INLINE Scalar run(const Evaluator &eval, const Func&)
165  {
166  return eval.coeff(Start);
167  }
168 };
169 
170 // This is actually dead code and will never be called. It is required
171 // to prevent false warnings regarding failed inlining though
172 // for 0 length run() will never be called at all.
173 template<typename Func, typename Evaluator, Index Start>
174 struct redux_novec_linear_unroller<Func, Evaluator, Start, 0>
175 {
176  typedef typename Evaluator::Scalar Scalar;
178  static EIGEN_STRONG_INLINE Scalar run(const Evaluator&, const Func&) { return Scalar(); }
179 };
180 
181 
183 template<typename Func, typename Evaluator, Index Start, Index Length>
184 struct redux_vec_unroller
185 {
186  template<typename PacketType>
188  static EIGEN_STRONG_INLINE PacketType run(const Evaluator &eval, const Func& func)
189  {
190  constexpr Index HalfLength = Length/2;
191 
192  return func.packetOp(
193  redux_vec_unroller<Func, Evaluator, Start, HalfLength>::template run<PacketType>(eval,func),
194  redux_vec_unroller<Func, Evaluator, Start+HalfLength, Length-HalfLength>::template run<PacketType>(eval,func) );
195  }
196 };
197 
198 template<typename Func, typename Evaluator, Index Start>
199 struct redux_vec_unroller<Func, Evaluator, Start, 1>
200 {
201  template<typename PacketType>
203  static EIGEN_STRONG_INLINE PacketType run(const Evaluator &eval, const Func&)
204  {
205  constexpr Index PacketSize = unpacket_traits<PacketType>::size;
206  constexpr Index index = Start * PacketSize;
207  constexpr Index outer = index / int(Evaluator::InnerSizeAtCompileTime);
208  constexpr Index inner = index % int(Evaluator::InnerSizeAtCompileTime);
209  constexpr int alignment = Evaluator::Alignment;
210 
211  return eval.template packetByOuterInner<alignment,PacketType>(outer, inner);
212  }
213 };
214 
215 template<typename Func, typename Evaluator, Index Start, Index Length>
216 struct redux_vec_linear_unroller
217 {
218  template<typename PacketType>
220  static EIGEN_STRONG_INLINE PacketType run(const Evaluator &eval, const Func& func)
221  {
222  constexpr Index HalfLength = Length/2;
223 
224  return func.packetOp(
225  redux_vec_linear_unroller<Func, Evaluator, Start, HalfLength>::template run<PacketType>(eval,func),
226  redux_vec_linear_unroller<Func, Evaluator, Start+HalfLength, Length-HalfLength>::template run<PacketType>(eval,func) );
227  }
228 };
229 
230 template<typename Func, typename Evaluator, Index Start>
231 struct redux_vec_linear_unroller<Func, Evaluator, Start, 1>
232 {
233  template<typename PacketType>
235  static EIGEN_STRONG_INLINE PacketType run(const Evaluator &eval, const Func&)
236  {
237  constexpr Index PacketSize = unpacket_traits<PacketType>::size;
238  constexpr Index index = (Start * PacketSize);
239  constexpr int alignment = Evaluator::Alignment;
240  return eval.template packet<alignment,PacketType>(index);
241  }
242 };
243 
244 
// Generic reduction dispatcher. The partial specializations below provide the
// actual implementations; the defaulted Traversal/Unrolling parameters are
// filled in from redux_traits, so redux_impl<Func,Evaluator> automatically
// picks the strategy computed above.
template<typename Func, typename Evaluator,
         int Traversal = redux_traits<Func, Evaluator>::Traversal,
         int Unrolling = redux_traits<Func, Evaluator>::Unrolling
>
struct redux_impl;
253 
254 template<typename Func, typename Evaluator>
255 struct redux_impl<Func, Evaluator, DefaultTraversal, NoUnrolling>
256 {
257  typedef typename Evaluator::Scalar Scalar;
258 
259  template<typename XprType>
260  EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE
261  Scalar run(const Evaluator &eval, const Func& func, const XprType& xpr)
262  {
263  eigen_assert(xpr.rows()>0 && xpr.cols()>0 && "you are using an empty matrix");
264  Scalar res = eval.coeffByOuterInner(0, 0);
265  for(Index i = 1; i < xpr.innerSize(); ++i)
266  res = func(res, eval.coeffByOuterInner(0, i));
267  for(Index i = 1; i < xpr.outerSize(); ++i)
268  for(Index j = 0; j < xpr.innerSize(); ++j)
269  res = func(res, eval.coeffByOuterInner(i, j));
270  return res;
271  }
272 };
273 
274 template<typename Func, typename Evaluator>
275 struct redux_impl<Func, Evaluator, LinearTraversal, NoUnrolling>
276 {
277  typedef typename Evaluator::Scalar Scalar;
278 
279  template<typename XprType>
280  EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE
281  Scalar run(const Evaluator &eval, const Func& func, const XprType& xpr)
282  {
283  eigen_assert(xpr.size()>0 && "you are using an empty matrix");
284  Scalar res = eval.coeff(0);
285  for(Index k = 1; k < xpr.size(); ++k)
286  res = func(res, eval.coeff(k));
287  return res;
288  }
289 };
290 
291 template<typename Func, typename Evaluator>
292 struct redux_impl<Func,Evaluator, DefaultTraversal, CompleteUnrolling>
293  : redux_novec_unroller<Func,Evaluator, 0, Evaluator::SizeAtCompileTime>
294 {
295  typedef redux_novec_unroller<Func,Evaluator, 0, Evaluator::SizeAtCompileTime> Base;
296  typedef typename Evaluator::Scalar Scalar;
297  template<typename XprType>
298  EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE
299  Scalar run(const Evaluator &eval, const Func& func, const XprType& /*xpr*/)
300  {
301  return Base::run(eval,func);
302  }
303 };
304 
305 template<typename Func, typename Evaluator>
306 struct redux_impl<Func,Evaluator, LinearTraversal, CompleteUnrolling>
307  : redux_novec_linear_unroller<Func,Evaluator, 0, Evaluator::SizeAtCompileTime>
308 {
309  typedef redux_novec_linear_unroller<Func,Evaluator, 0, Evaluator::SizeAtCompileTime> Base;
310  typedef typename Evaluator::Scalar Scalar;
311  template<typename XprType>
312  EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE
313  Scalar run(const Evaluator &eval, const Func& func, const XprType& /*xpr*/)
314  {
315  return Base::run(eval,func);
316  }
317 };
318 
// Linear vectorized traversal, no unrolling: packet reduction over the flat
// index range, with scalar handling of the unaligned head and the tail.
// NOTE(review): assumes size > 0 — eval.coeff(0) in the small-size branch
// would be invalid otherwise; the caller DenseBase::redux() asserts this.
template<typename Func, typename Evaluator>
struct redux_impl<Func, Evaluator, LinearVectorizedTraversal, NoUnrolling>
{
  typedef typename Evaluator::Scalar Scalar;
  typedef typename redux_traits<Func, Evaluator>::PacketType PacketScalar;

  template<typename XprType>
  static Scalar run(const Evaluator &eval, const Func& func, const XprType& xpr)
  {
    const Index size = xpr.size();

    constexpr Index packetSize = redux_traits<Func, Evaluator>::PacketSize;
    constexpr int packetAlignment = unpacket_traits<PacketScalar>::alignment;
    // Aligned loads are possible only with direct, scalar-aligned storage;
    // otherwise fall back to whatever alignment the evaluator guarantees.
    constexpr int alignment0 = (bool(Evaluator::Flags & DirectAccessBit) && bool(packet_traits<Scalar>::AlignedOnScalar)) ? int(packetAlignment) : int(Unaligned);
    constexpr int alignment = plain_enum_max(alignment0, Evaluator::Alignment);
    // [alignedStart, alignedEnd) is the packet-processable range;
    // alignedEnd2 additionally rounds down to pairs of packets so the main
    // loop can run two independent accumulators.
    const Index alignedStart = internal::first_default_aligned(xpr);
    const Index alignedSize2 = ((size-alignedStart)/(2*packetSize))*(2*packetSize);
    const Index alignedSize = ((size-alignedStart)/(packetSize))*(packetSize);
    const Index alignedEnd2 = alignedStart + alignedSize2;
    const Index alignedEnd = alignedStart + alignedSize;
    Scalar res;
    if(alignedSize)
    {
      PacketScalar packet_res0 = eval.template packet<alignment,PacketScalar>(alignedStart);
      if(alignedSize>packetSize) // we have at least two packets to partly unroll the loop
      {
        // Two accumulators break the dependency chain between iterations.
        PacketScalar packet_res1 = eval.template packet<alignment,PacketScalar>(alignedStart+packetSize);
        for(Index index = alignedStart + 2*packetSize; index < alignedEnd2; index += 2*packetSize)
        {
          packet_res0 = func.packetOp(packet_res0, eval.template packet<alignment,PacketScalar>(index));
          packet_res1 = func.packetOp(packet_res1, eval.template packet<alignment,PacketScalar>(index+packetSize));
        }

        packet_res0 = func.packetOp(packet_res0,packet_res1);
        // At most one packet remains between the paired range and alignedEnd.
        if(alignedEnd>alignedEnd2)
          packet_res0 = func.packetOp(packet_res0, eval.template packet<alignment,PacketScalar>(alignedEnd2));
      }
      // Horizontal reduction of the packet accumulator.
      res = func.predux(packet_res0);

      // Scalar head: coefficients before the first aligned one.
      for(Index index = 0; index < alignedStart; ++index)
        res = func(res,eval.coeff(index));

      // Scalar tail: leftover coefficients after the last full packet.
      for(Index index = alignedEnd; index < size; ++index)
        res = func(res,eval.coeff(index));
    }
    else // too small to vectorize anything.
         // since this is dynamic-size hence inefficient anyway for such small sizes, don't try to optimize.
    {
      res = eval.coeff(0);
      for(Index index = 1; index < size; ++index)
        res = func(res,eval.coeff(index));
    }

    return res;
  }
};
375 
// NOTE: for SliceVectorizedTraversal we simply bypass unrolling
// Slice-vectorized reduction: each inner slice (row or column, depending on
// storage order) is reduced with unaligned packet loads; leftover
// coefficients at the end of each slice are folded in with scalar ops.
template<typename Func, typename Evaluator, int Unrolling>
struct redux_impl<Func, Evaluator, SliceVectorizedTraversal, Unrolling>
{
  typedef typename Evaluator::Scalar Scalar;
  typedef typename redux_traits<Func, Evaluator>::PacketType PacketType;

  template<typename XprType>
  EIGEN_DEVICE_FUNC static Scalar run(const Evaluator &eval, const Func& func, const XprType& xpr)
  {
    eigen_assert(xpr.rows()>0 && xpr.cols()>0 && "you are using an empty matrix");
    constexpr Index packetSize = redux_traits<Func, Evaluator>::PacketSize;
    const Index innerSize = xpr.innerSize();
    const Index outerSize = xpr.outerSize();
    // Number of inner-slice coefficients coverable by whole packets.
    const Index packetedInnerSize = ((innerSize)/packetSize)*packetSize;
    Scalar res;
    if(packetedInnerSize)
    {
      // Seed with the first packet of slice 0; the j==0 iteration of the
      // loop below therefore starts at i == packetSize.
      PacketType packet_res = eval.template packet<Unaligned,PacketType>(0,0);
      for(Index j=0; j<outerSize; ++j)
        for(Index i=(j==0?packetSize:0); i<packetedInnerSize; i+=Index(packetSize))
          packet_res = func.packetOp(packet_res, eval.template packetByOuterInner<Unaligned,PacketType>(j,i));

      res = func.predux(packet_res);
      // Scalar remainder of every slice (innerSize not a multiple of packetSize).
      for(Index j=0; j<outerSize; ++j)
        for(Index i=packetedInnerSize; i<innerSize; ++i)
          res = func(res, eval.coeffByOuterInner(j,i));
    }
    else // too small to vectorize anything.
         // since this is dynamic-size hence inefficient anyway for such small sizes, don't try to optimize.
    {
      res = redux_impl<Func, Evaluator, DefaultTraversal, NoUnrolling>::run(eval, func, xpr);
    }

    return res;
  }
};
413 
414 template<typename Func, typename Evaluator>
415 struct redux_impl<Func, Evaluator, LinearVectorizedTraversal, CompleteUnrolling>
416 {
417  typedef typename Evaluator::Scalar Scalar;
418 
419  typedef typename redux_traits<Func, Evaluator>::PacketType PacketType;
420  static constexpr Index PacketSize = redux_traits<Func, Evaluator>::PacketSize;
421  static constexpr Index Size = Evaluator::SizeAtCompileTime;
422  static constexpr Index VectorizedSize = (int(Size) / int(PacketSize)) * int(PacketSize);
423 
424  template<typename XprType>
425  EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE
426  Scalar run(const Evaluator &eval, const Func& func, const XprType &xpr)
427  {
429  eigen_assert(xpr.rows()>0 && xpr.cols()>0 && "you are using an empty matrix");
430  if (VectorizedSize > 0) {
431  Scalar res = func.predux(redux_vec_linear_unroller<Func, Evaluator, 0, Size / PacketSize>::template run<PacketType>(eval,func));
432  if (VectorizedSize != Size)
433  res = func(res,redux_novec_linear_unroller<Func, Evaluator, VectorizedSize, Size-VectorizedSize>::run(eval,func));
434  return res;
435  }
436  else {
437  return redux_novec_linear_unroller<Func, Evaluator, 0, Size>::run(eval,func);
438  }
439  }
440 };
441 
// Evaluator adaptor used by DenseBase::redux(): wraps the expression's
// evaluator and adds (outer, inner) coefficient/packet accessors that map to
// (row, col) according to the expression's storage order.
template<typename XprType_>
class redux_evaluator : public internal::evaluator<XprType_>
{
  typedef internal::evaluator<XprType_> Base;
public:
  typedef XprType_ XprType;
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
  explicit redux_evaluator(const XprType &xpr) : Base(xpr) {}

  typedef typename XprType::Scalar Scalar;
  typedef typename XprType::CoeffReturnType CoeffReturnType;
  typedef typename XprType::PacketScalar PacketScalar;

  enum {
    MaxRowsAtCompileTime = XprType::MaxRowsAtCompileTime,
    MaxColsAtCompileTime = XprType::MaxColsAtCompileTime,
    // TODO we should not remove DirectAccessBit and rather find an elegant way to query the alignment offset at runtime from the evaluator
    Flags = Base::Flags & ~DirectAccessBit,
    IsRowMajor = XprType::IsRowMajor,
    SizeAtCompileTime = XprType::SizeAtCompileTime,
    InnerSizeAtCompileTime = XprType::InnerSizeAtCompileTime
  };

  // Coefficient access by (outer, inner) position: outer is the row for
  // row-major expressions and the column otherwise.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
  CoeffReturnType coeffByOuterInner(Index outer, Index inner) const
  { return Base::coeff(IsRowMajor ? outer : inner, IsRowMajor ? inner : outer); }

  // Packet access by (outer, inner) position, same mapping as above.
  template<int LoadMode, typename PacketType>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
  PacketType packetByOuterInner(Index outer, Index inner) const
  { return Base::template packet<LoadMode,PacketType>(IsRowMajor ? outer : inner, IsRowMajor ? inner : outer); }

};
476 
477 } // end namespace internal
478 
479 
// Entry point for full reductions: wraps the expression in a redux_evaluator
// and dispatches to the redux_impl specialization selected by redux_traits.
// The expression must be non-empty; callers like sum()/prod() handle the
// empty case before calling redux().
template<typename Derived>
template<typename Func>
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
DenseBase<Derived>::redux(const Func& func) const
{
  eigen_assert(this->rows()>0 && this->cols()>0 && "you are using an empty matrix");

  typedef typename internal::redux_evaluator<Derived> ThisEvaluator;
  ThisEvaluator thisEval(derived());

  // The initial expression is passed to the reducer as an additional argument instead of
  // passing it as a member of redux_evaluator to help
  // (NOTE(review): comment truncated in this copy — presumably "to help the
  // compiler"; confirm against upstream.)
  return internal::redux_impl<Func, ThisEvaluator>::run(thisEval, func, derived());
}
507 
515 template<typename Derived>
516 template<int NaNPropagation>
517 EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
519 {
520  return derived().redux(Eigen::internal::scalar_min_op<Scalar,Scalar, NaNPropagation>());
521 }
522 
530 template<typename Derived>
531 template<int NaNPropagation>
532 EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
534 {
535  return derived().redux(Eigen::internal::scalar_max_op<Scalar,Scalar, NaNPropagation>());
536 }
537 
544 template<typename Derived>
545 EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
547 {
548  if(SizeAtCompileTime==0 || (SizeAtCompileTime==Dynamic && size()==0))
549  return Scalar(0);
550  return derived().redux(Eigen::internal::scalar_sum_op<Scalar,Scalar>());
551 }
552 
557 template<typename Derived>
558 EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
560 {
561 #ifdef __INTEL_COMPILER
562  #pragma warning push
563  #pragma warning ( disable : 2259 )
564 #endif
565  return Scalar(derived().redux(Eigen::internal::scalar_sum_op<Scalar,Scalar>())) / Scalar(this->size());
566 #ifdef __INTEL_COMPILER
567  #pragma warning pop
568 #endif
569 }
570 
578 template<typename Derived>
579 EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
581 {
582  if(SizeAtCompileTime==0 || (SizeAtCompileTime==Dynamic && size()==0))
583  return Scalar(1);
584  return derived().redux(Eigen::internal::scalar_product_op<Scalar>());
585 }
586 
593 template<typename Derived>
594 EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
596 {
597  return derived().diagonal().sum();
598 }
599 
600 } // end namespace Eigen
601 
602 #endif // EIGEN_REDUX_H
#define EIGEN_DEBUG_VAR(x)
Definition: Macros.h:806
#define EIGEN_DEVICE_FUNC
Definition: Macros.h:883
#define EIGEN_ONLY_USED_FOR_DEBUG(x)
Definition: Macros.h:914
#define eigen_assert(x)
Definition: Macros.h:902
cout<< "Here is the matrix m:"<< endl<< m<< endl;Matrix< ptrdiff_t, 3, 1 > res
#define EIGEN_UNROLLING_LIMIT
Definition: Settings.h:24
internal::traits< Derived >::Scalar minCoeff() const
Definition: Redux.h:518
Scalar mean() const
Definition: Redux.h:559
internal::traits< Homogeneous< MatrixType, Direction_ > >::Scalar Scalar
Definition: DenseBase.h:61
internal::traits< Derived >::Scalar maxCoeff() const
Definition: Redux.h:533
Scalar redux(const BinaryOp &func) const
internal::find_best_packet< Scalar, SizeAtCompileTime >::type PacketScalar
Definition: DenseBase.h:173
Scalar sum() const
Definition: Redux.h:546
Scalar prod() const
Definition: Redux.h:580
Scalar trace() const
Definition: Redux.h:595
@ Unaligned
Definition: Constants.h:235
const unsigned int ActualPacketAccessBit
Definition: Constants.h:107
const unsigned int LinearAccessBit
Definition: Constants.h:132
const unsigned int DirectAccessBit
Definition: Constants.h:157
constexpr int plain_enum_max(A a, B b)
Definition: Meta.h:524
static Index first_default_aligned(const DenseBase< Derived > &m)
: InteropHeaders
Definition: Core:139
const int HugeCost
Definition: Constants.h:46
@ LinearVectorizedTraversal
Definition: Constants.h:287
@ DefaultTraversal
Definition: Constants.h:279
@ SliceVectorizedTraversal
Definition: Constants.h:290
@ LinearTraversal
Definition: Constants.h:281
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index
The Index type as used for the API.
Definition: Meta.h:82
@ CompleteUnrolling
Definition: Constants.h:306
@ NoUnrolling
Definition: Constants.h:301
const int Dynamic
Definition: Constants.h:24
std::ptrdiff_t j