12 #ifndef EIGEN_ASSIGN_EVALUATOR_H
13 #define EIGEN_ASSIGN_EVALUATOR_H
29 template <
typename DstEvaluator,
typename SrcEvaluator,
typename AssignFunc,
int MaxPacketSize = -1>
30 struct copy_using_evaluator_traits
32 typedef typename DstEvaluator::XprType Dst;
33 typedef typename Dst::Scalar DstScalar;
36 DstFlags = DstEvaluator::Flags,
37 SrcFlags = SrcEvaluator::Flags
42 DstAlignment = DstEvaluator::Alignment,
43 SrcAlignment = SrcEvaluator::Alignment,
50 InnerSize = int(Dst::IsVectorAtCompileTime) ? int(Dst::SizeAtCompileTime)
51 : int(DstFlags)&
RowMajorBit ? int(Dst::ColsAtCompileTime)
52 : int(Dst::RowsAtCompileTime),
53 InnerMaxSize = int(Dst::IsVectorAtCompileTime) ? int(Dst::MaxSizeAtCompileTime)
54 : int(DstFlags)&
RowMajorBit ? int(Dst::MaxColsAtCompileTime)
55 : int(Dst::MaxRowsAtCompileTime),
58 OuterStride = int(outer_stride_at_compile_time<Dst>::ret),
59 MaxSizeAtCompileTime = Dst::SizeAtCompileTime
63 typedef typename find_best_packet<DstScalar,RestrictedLinearSize>::type LinearPacketType;
64 typedef typename find_best_packet<DstScalar,RestrictedInnerSize>::type InnerPacketType;
73 LinearRequiredAlignment = unpacket_traits<LinearPacketType>::alignment,
74 InnerRequiredAlignment = unpacket_traits<InnerPacketType>::alignment
81 StorageOrdersAgree = (int(DstIsRowMajor) == int(SrcIsRowMajor)),
82 MightVectorize =
bool(StorageOrdersAgree)
84 &&
bool(functor_traits<AssignFunc>::PacketAccess),
85 MayInnerVectorize = MightVectorize
86 && int(InnerSize)!=
Dynamic && int(InnerSize)%int(InnerPacketSize)==0
87 && int(OuterStride)!=
Dynamic && int(OuterStride)%int(InnerPacketSize)==0
89 MayLinearize =
bool(StorageOrdersAgree) && (int(DstFlags) & int(SrcFlags) &
LinearAccessBit),
90 MayLinearVectorize =
bool(MightVectorize) &&
bool(MayLinearize) &&
bool(DstHasDirectAccess)
94 MaySliceVectorize =
bool(MightVectorize) &&
bool(DstHasDirectAccess)
121 : Vectorized ? InnerPacketSize
124 MayUnrollCompletely = int(Dst::SizeAtCompileTime) !=
Dynamic
125 && int(Dst::SizeAtCompileTime) * (int(DstEvaluator::CoeffReadCost)+int(SrcEvaluator::CoeffReadCost)) <=
int(UnrollingLimit),
126 MayUnrollInner = int(InnerSize) !=
Dynamic
127 && int(InnerSize) * (int(DstEvaluator::CoeffReadCost)+int(SrcEvaluator::CoeffReadCost)) <=
int(UnrollingLimit)
153 #ifdef EIGEN_DEBUG_ASSIGN
156 std::cerr <<
"DstXpr: " <<
typeid(
typename DstEvaluator::XprType).name() << std::endl;
157 std::cerr <<
"SrcXpr: " <<
typeid(
typename SrcEvaluator::XprType).name() << std::endl;
158 std::cerr.setf(std::ios::hex, std::ios::basefield);
159 std::cerr <<
"DstFlags" <<
" = " << DstFlags <<
" (" << demangle_flags(DstFlags) <<
" )" << std::endl;
160 std::cerr <<
"SrcFlags" <<
" = " << SrcFlags <<
" (" << demangle_flags(SrcFlags) <<
" )" << std::endl;
161 std::cerr.unsetf(std::ios::hex);
178 std::cerr <<
"Traversal" <<
" = " << Traversal <<
" (" << demangle_traversal(Traversal) <<
")" << std::endl;
185 std::cerr <<
"Unrolling" <<
" = " << Unrolling <<
" (" << demangle_unrolling(Unrolling) <<
")" << std::endl;
186 std::cerr << std::endl;
199 template<
typename Kernel,
int Index,
int Stop>
200 struct copy_using_evaluator_DefaultTraversal_CompleteUnrolling
203 typedef typename Kernel::DstEvaluatorType DstEvaluatorType;
204 typedef typename DstEvaluatorType::XprType DstXprType;
207 outer =
Index / DstXprType::InnerSizeAtCompileTime,
208 inner =
Index % DstXprType::InnerSizeAtCompileTime
213 kernel.assignCoeffByOuterInner(outer, inner);
214 copy_using_evaluator_DefaultTraversal_CompleteUnrolling<Kernel, Index+1, Stop>::run(kernel);
218 template<
typename Kernel,
int Stop>
219 struct copy_using_evaluator_DefaultTraversal_CompleteUnrolling<Kernel, Stop, Stop>
224 template<
typename Kernel,
int Index_,
int Stop>
225 struct copy_using_evaluator_DefaultTraversal_InnerUnrolling
229 kernel.assignCoeffByOuterInner(outer, Index_);
230 copy_using_evaluator_DefaultTraversal_InnerUnrolling<Kernel, Index_+1, Stop>::run(kernel, outer);
234 template<
typename Kernel,
int Stop>
235 struct copy_using_evaluator_DefaultTraversal_InnerUnrolling<Kernel, Stop, Stop>
244 template<
typename Kernel,
int Index,
int Stop>
245 struct copy_using_evaluator_LinearTraversal_CompleteUnrolling
249 kernel.assignCoeff(
Index);
250 copy_using_evaluator_LinearTraversal_CompleteUnrolling<Kernel, Index+1, Stop>::run(kernel);
254 template<
typename Kernel,
int Stop>
255 struct copy_using_evaluator_LinearTraversal_CompleteUnrolling<Kernel, Stop, Stop>
264 template<
typename Kernel,
int Index,
int Stop>
265 struct copy_using_evaluator_innervec_CompleteUnrolling
268 typedef typename Kernel::DstEvaluatorType DstEvaluatorType;
269 typedef typename DstEvaluatorType::XprType DstXprType;
270 typedef typename Kernel::PacketType PacketType;
273 outer =
Index / DstXprType::InnerSizeAtCompileTime,
274 inner =
Index % DstXprType::InnerSizeAtCompileTime,
275 SrcAlignment = Kernel::AssignmentTraits::SrcAlignment,
276 DstAlignment = Kernel::AssignmentTraits::DstAlignment
281 kernel.template assignPacketByOuterInner<DstAlignment, SrcAlignment, PacketType>(outer, inner);
283 copy_using_evaluator_innervec_CompleteUnrolling<Kernel, NextIndex, Stop>::run(kernel);
287 template<
typename Kernel,
int Stop>
288 struct copy_using_evaluator_innervec_CompleteUnrolling<Kernel, Stop, Stop>
293 template<
typename Kernel,
int Index_,
int Stop,
int SrcAlignment,
int DstAlignment>
294 struct copy_using_evaluator_innervec_InnerUnrolling
296 typedef typename Kernel::PacketType PacketType;
299 kernel.template assignPacketByOuterInner<DstAlignment, SrcAlignment, PacketType>(outer, Index_);
301 copy_using_evaluator_innervec_InnerUnrolling<Kernel, NextIndex, Stop, SrcAlignment, DstAlignment>::run(kernel, outer);
305 template<
typename Kernel,
int Stop,
int SrcAlignment,
int DstAlignment>
306 struct copy_using_evaluator_innervec_InnerUnrolling<Kernel, Stop, Stop, SrcAlignment, DstAlignment>
/***************************************************************************
* Part 3 : implementation of all cases
***************************************************************************/

// Primary declaration: dispatch on the Traversal/Unrolling decisions computed
// by copy_using_evaluator_traits; each combination gets a specialization below.
template<typename Kernel,
         int Traversal = Kernel::AssignmentTraits::Traversal,
         int Unrolling = Kernel::AssignmentTraits::Unrolling>
struct dense_assignment_loop;
327 template<
typename Kernel,
int Unrolling>
333 EIGEN_INTERNAL_ERROR_PLEASE_FILE_A_BUG_REPORT)
341 template<
typename Kernel>
346 for(
Index outer = 0; outer < kernel.outerSize(); ++outer) {
347 for(
Index inner = 0; inner < kernel.innerSize(); ++inner) {
348 kernel.assignCoeffByOuterInner(outer, inner);
354 template<
typename Kernel>
359 typedef typename Kernel::DstEvaluatorType::XprType DstXprType;
360 copy_using_evaluator_DefaultTraversal_CompleteUnrolling<Kernel, 0, DstXprType::SizeAtCompileTime>::run(kernel);
364 template<
typename Kernel>
369 typedef typename Kernel::DstEvaluatorType::XprType DstXprType;
371 const Index outerSize = kernel.outerSize();
372 for(
Index outer = 0; outer < outerSize; ++outer)
373 copy_using_evaluator_DefaultTraversal_InnerUnrolling<Kernel, 0, DstXprType::InnerSizeAtCompileTime>::run(kernel, outer);
385 template <
bool IsAligned = false>
386 struct unaligned_dense_assignment_loop
389 template <
typename Kernel>
394 struct unaligned_dense_assignment_loop<false>
400 template <
typename Kernel>
405 template <
typename Kernel>
411 for (
Index index = start; index <
end; ++index)
412 kernel.assignCoeff(index);
416 template <
typename Kernel,
int Index,
int Stop>
417 struct copy_using_evaluator_linearvec_CompleteUnrolling {
419 typedef typename Kernel::DstEvaluatorType DstEvaluatorType;
420 typedef typename DstEvaluatorType::XprType DstXprType;
421 typedef typename Kernel::PacketType PacketType;
424 SrcAlignment = Kernel::AssignmentTraits::SrcAlignment,
425 DstAlignment = Kernel::AssignmentTraits::DstAlignment
429 kernel.template assignPacket<DstAlignment, SrcAlignment, PacketType>(
Index);
431 copy_using_evaluator_linearvec_CompleteUnrolling<Kernel, NextIndex, Stop>::run(kernel);
435 template <
typename Kernel,
int Stop>
436 struct copy_using_evaluator_linearvec_CompleteUnrolling<Kernel, Stop, Stop> {
440 template<
typename Kernel>
446 typedef typename Kernel::Scalar Scalar;
447 typedef typename Kernel::PacketType PacketType;
449 requestedAlignment = Kernel::AssignmentTraits::LinearRequiredAlignment,
451 dstIsAligned = int(Kernel::AssignmentTraits::DstAlignment)>=int(requestedAlignment),
452 dstAlignment = packet_traits<Scalar>::AlignedOnScalar ? int(requestedAlignment)
453 : int(Kernel::AssignmentTraits::DstAlignment),
454 srcAlignment = Kernel::AssignmentTraits::JointAlignment
456 const Index alignedStart = dstIsAligned ? 0 : internal::first_aligned<requestedAlignment>(kernel.dstDataPtr(),
size);
457 const Index alignedEnd = alignedStart + ((
size-alignedStart)/packetSize)*packetSize;
459 unaligned_dense_assignment_loop<dstIsAligned!=0>::run(kernel, 0, alignedStart);
461 for(
Index index = alignedStart; index < alignedEnd; index += packetSize)
462 kernel.template assignPacket<dstAlignment, srcAlignment, PacketType>(index);
464 unaligned_dense_assignment_loop<>::run(kernel, alignedEnd,
size);
468 template<
typename Kernel>
473 typedef typename Kernel::DstEvaluatorType::XprType DstXprType;
474 typedef typename Kernel::PacketType PacketType;
476 enum {
size = DstXprType::SizeAtCompileTime,
478 alignedSize = (int(
size)/packetSize)*packetSize };
480 copy_using_evaluator_linearvec_CompleteUnrolling<Kernel, 0, alignedSize>::run(kernel);
481 copy_using_evaluator_LinearTraversal_CompleteUnrolling<Kernel, alignedSize, size>::run(kernel);
489 template<
typename Kernel>
492 typedef typename Kernel::PacketType PacketType;
494 SrcAlignment = Kernel::AssignmentTraits::SrcAlignment,
495 DstAlignment = Kernel::AssignmentTraits::DstAlignment
499 const Index innerSize = kernel.innerSize();
500 const Index outerSize = kernel.outerSize();
502 for(
Index outer = 0; outer < outerSize; ++outer)
503 for(
Index inner = 0; inner < innerSize; inner+=packetSize)
504 kernel.template assignPacketByOuterInner<DstAlignment, SrcAlignment, PacketType>(outer, inner);
508 template<
typename Kernel>
513 typedef typename Kernel::DstEvaluatorType::XprType DstXprType;
514 copy_using_evaluator_innervec_CompleteUnrolling<Kernel, 0, DstXprType::SizeAtCompileTime>::run(kernel);
518 template<
typename Kernel>
523 typedef typename Kernel::DstEvaluatorType::XprType DstXprType;
524 typedef typename Kernel::AssignmentTraits Traits;
525 const Index outerSize = kernel.outerSize();
526 for(
Index outer = 0; outer < outerSize; ++outer)
527 copy_using_evaluator_innervec_InnerUnrolling<Kernel, 0, DstXprType::InnerSizeAtCompileTime,
528 Traits::SrcAlignment, Traits::DstAlignment>::run(kernel, outer);
536 template<
typename Kernel>
543 kernel.assignCoeff(
i);
547 template<
typename Kernel>
552 typedef typename Kernel::DstEvaluatorType::XprType DstXprType;
553 copy_using_evaluator_LinearTraversal_CompleteUnrolling<Kernel, 0, DstXprType::SizeAtCompileTime>::run(kernel);
561 template<
typename Kernel>
566 typedef typename Kernel::Scalar Scalar;
567 typedef typename Kernel::PacketType PacketType;
570 requestedAlignment = int(Kernel::AssignmentTraits::InnerRequiredAlignment),
571 alignable = packet_traits<Scalar>::AlignedOnScalar || int(Kernel::AssignmentTraits::DstAlignment)>=
sizeof(Scalar),
572 dstIsAligned =
int(Kernel::AssignmentTraits::DstAlignment)>=int(requestedAlignment),
573 dstAlignment = alignable ? int(requestedAlignment)
574 : int(Kernel::AssignmentTraits::DstAlignment)
576 const Scalar *dst_ptr = kernel.dstDataPtr();
577 if((!
bool(dstIsAligned)) && (std::uintptr_t(dst_ptr) %
sizeof(Scalar))>0)
580 return dense_assignment_loop<Kernel,DefaultTraversal,NoUnrolling>::run(kernel);
582 const Index packetAlignedMask = packetSize - 1;
583 const Index innerSize = kernel.innerSize();
584 const Index outerSize = kernel.outerSize();
585 const Index alignedStep = alignable ? (packetSize - kernel.outerStride() % packetSize) & packetAlignedMask : 0;
586 Index alignedStart = ((!alignable) ||
bool(dstIsAligned)) ? 0 : internal::first_aligned<requestedAlignment>(dst_ptr, innerSize);
588 for(
Index outer = 0; outer < outerSize; ++outer)
590 const Index alignedEnd = alignedStart + ((innerSize-alignedStart) & ~packetAlignedMask);
592 for(
Index inner = 0; inner<alignedStart ; ++inner)
593 kernel.assignCoeffByOuterInner(outer, inner);
596 for(
Index inner = alignedStart; inner<alignedEnd; inner+=packetSize)
597 kernel.template assignPacketByOuterInner<dstAlignment, Unaligned, PacketType>(outer, inner);
600 for(
Index inner = alignedEnd; inner<innerSize ; ++inner)
601 kernel.assignCoeffByOuterInner(outer, inner);
603 alignedStart =
numext::mini((alignedStart+alignedStep)%packetSize, innerSize);
#if EIGEN_UNALIGNED_VECTORIZE
// With unaligned vectorization enabled, the per-slice split is known at
// compile time: unrolled packets for the packet-multiple prefix of each inner
// slice, unrolled scalars for the rest.
template<typename Kernel>
struct dense_assignment_loop<Kernel, SliceVectorizedTraversal, InnerUnrolling>
{
  EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE void run(Kernel &kernel)
  {
    typedef typename Kernel::DstEvaluatorType::XprType DstXprType;
    typedef typename Kernel::PacketType PacketType;

    enum { innerSize = DstXprType::InnerSizeAtCompileTime,
           packetSize = unpacket_traits<PacketType>::size,
           vectorizableSize = (int(innerSize) / int(packetSize)) * int(packetSize),
           size = DstXprType::SizeAtCompileTime };

    for(Index outer = 0; outer < kernel.outerSize(); ++outer)
    {
      copy_using_evaluator_innervec_InnerUnrolling<Kernel, 0, vectorizableSize, 0, 0>::run(kernel, outer);
      copy_using_evaluator_DefaultTraversal_InnerUnrolling<Kernel, vectorizableSize, innerSize>::run(kernel, outer);
    }
  }
};
#endif
642 template<
typename DstEvaluatorTypeT,
typename SrcEvaluatorTypeT,
typename Functor,
int Version = Specialized>
643 class generic_dense_assignment_kernel
646 typedef typename DstEvaluatorTypeT::XprType DstXprType;
647 typedef typename SrcEvaluatorTypeT::XprType SrcXprType;
650 typedef DstEvaluatorTypeT DstEvaluatorType;
651 typedef SrcEvaluatorTypeT SrcEvaluatorType;
652 typedef typename DstEvaluatorType::Scalar Scalar;
653 typedef copy_using_evaluator_traits<DstEvaluatorTypeT, SrcEvaluatorTypeT, Functor> AssignmentTraits;
654 typedef typename AssignmentTraits::PacketType PacketType;
658 generic_dense_assignment_kernel(DstEvaluatorType &dst,
const SrcEvaluatorType &src,
const Functor &func, DstXprType& dstExpr)
659 : m_dst(dst), m_src(src), m_functor(func), m_dstExpr(dstExpr)
661 #ifdef EIGEN_DEBUG_ASSIGN
662 AssignmentTraits::debug();
679 m_functor.assignCoeff(m_dst.coeffRef(
row,
col), m_src.coeff(
row,
col));
685 m_functor.assignCoeff(m_dst.coeffRef(index), m_src.coeff(index));
691 Index row = rowIndexByOuterInner(outer, inner);
692 Index col = colIndexByOuterInner(outer, inner);
697 template<
int StoreMode,
int LoadMode,
typename Packet>
700 m_functor.template assignPacket<StoreMode>(&m_dst.coeffRef(
row,
col), m_src.template packet<LoadMode,Packet>(
row,
col));
703 template<
int StoreMode,
int LoadMode,
typename Packet>
706 m_functor.template assignPacket<StoreMode>(&m_dst.coeffRef(index), m_src.template packet<LoadMode,Packet>(index));
709 template<
int StoreMode,
int LoadMode,
typename Packet>
712 Index row = rowIndexByOuterInner(outer, inner);
713 Index col = colIndexByOuterInner(outer, inner);
714 assignPacket<StoreMode,LoadMode,Packet>(
row,
col);
719 typedef typename DstEvaluatorType::ExpressionTraits Traits;
720 return int(Traits::RowsAtCompileTime) == 1 ? 0
721 : int(Traits::ColsAtCompileTime) == 1 ? inner
728 typedef typename DstEvaluatorType::ExpressionTraits Traits;
729 return int(Traits::ColsAtCompileTime) == 1 ? 0
730 : int(Traits::RowsAtCompileTime) == 1 ? inner
737 return m_dstExpr.data();
741 DstEvaluatorType& m_dst;
742 const SrcEvaluatorType& m_src;
743 const Functor &m_functor;
745 DstXprType& m_dstExpr;
752 template<
typename DstEvaluatorTypeT,
typename SrcEvaluatorTypeT,
typename Functor>
753 class restricted_packet_dense_assignment_kernel :
public generic_dense_assignment_kernel<DstEvaluatorTypeT, SrcEvaluatorTypeT, Functor, BuiltIn>
756 typedef generic_dense_assignment_kernel<DstEvaluatorTypeT, SrcEvaluatorTypeT, Functor, BuiltIn> Base;
758 typedef typename Base::Scalar Scalar;
759 typedef typename Base::DstXprType DstXprType;
760 typedef copy_using_evaluator_traits<DstEvaluatorTypeT, SrcEvaluatorTypeT, Functor, 4> AssignmentTraits;
761 typedef typename AssignmentTraits::PacketType PacketType;
763 EIGEN_DEVICE_FUNC restricted_packet_dense_assignment_kernel(DstEvaluatorTypeT &dst,
const SrcEvaluatorTypeT &src,
const Functor &func, DstXprType& dstExpr)
764 : Base(dst, src, func, dstExpr)
773 template<
typename DstXprType,
typename SrcXprType,
typename Functor>
779 eigen_assert(dst.rows() == src.rows() && dst.cols() == src.cols());
782 template<
typename DstXprType,
typename SrcXprType,
typename T1,
typename T2>
784 void resize_if_allowed(DstXprType &dst,
const SrcXprType& src,
const internal::assign_op<T1,T2> &)
786 Index dstRows = src.rows();
787 Index dstCols = src.cols();
788 if(((dst.rows()!=dstRows) || (dst.cols()!=dstCols)))
789 dst.resize(dstRows, dstCols);
790 eigen_assert(dst.rows() == dstRows && dst.cols() == dstCols);
793 template<
typename DstXprType,
typename SrcXprType,
typename Functor>
796 typedef evaluator<DstXprType> DstEvaluatorType;
797 typedef evaluator<SrcXprType> SrcEvaluatorType;
799 SrcEvaluatorType srcEvaluator(src);
805 DstEvaluatorType dstEvaluator(dst);
807 typedef generic_dense_assignment_kernel<DstEvaluatorType,SrcEvaluatorType,Functor> Kernel;
808 Kernel kernel(dstEvaluator, srcEvaluator, func, dst.const_cast_derived());
810 dense_assignment_loop<Kernel>::run(kernel);
814 #ifndef EIGEN_GPU_COMPILE_PHASE
815 template<
typename DstXprType>
819 std::fill_n(dst.data(), dst.size(), src.functor()());
823 template<
typename DstXprType,
typename SrcXprType>
836 template<
typename DstShape,
typename SrcShape>
struct AssignmentKind;
839 struct Dense2Dense {};
840 struct EigenBase2EigenBase {};
842 template<
typename,
typename>
struct AssignmentKind {
typedef EigenBase2EigenBase Kind; };
843 template<>
struct AssignmentKind<DenseShape,DenseShape> {
typedef Dense2Dense Kind; };
846 template<
typename DstXprType,
typename SrcXprType,
typename Functor,
847 typename Kind =
typename AssignmentKind< typename evaluator_traits<DstXprType>::Shape ,
typename evaluator_traits<SrcXprType>::Shape >::Kind,
848 typename EnableIf =
void>
857 template<
typename Dst,
typename Src>
861 call_assignment(dst, src, internal::assign_op<typename Dst::Scalar,typename Src::Scalar>());
863 template<
typename Dst,
typename Src>
867 call_assignment(dst, src, internal::assign_op<typename Dst::Scalar,typename Src::Scalar>());
871 template<
typename Dst,
typename Src,
typename Func>
873 void call_assignment(Dst& dst,
const Src& src,
const Func& func, std::enable_if_t< evaluator_assume_aliasing<Src>::value,
void*> = 0)
875 typename plain_matrix_type<Src>::type tmp(src);
879 template<
typename Dst,
typename Src,
typename Func>
881 void call_assignment(Dst& dst,
const Src& src,
const Func& func, std::enable_if_t<!evaluator_assume_aliasing<Src>::value,
void*> = 0)
888 template<
typename Dst,
template <
typename>
class StorageBase,
typename Src,
typename Func>
896 template<
typename Dst,
typename Src,
typename Func>
901 NeedToTranspose = ( (int(Dst::RowsAtCompileTime) == 1 && int(Src::ColsAtCompileTime) == 1)
902 || (
int(Dst::ColsAtCompileTime) == 1 && int(Src::RowsAtCompileTime) == 1)
903 ) && int(Dst::SizeAtCompileTime) != 1
906 typedef std::conditional_t<NeedToTranspose, Transpose<Dst>, Dst> ActualDstTypeCleaned;
907 typedef std::conditional_t<NeedToTranspose, Transpose<Dst>, Dst&> ActualDstType;
908 ActualDstType actualDst(dst);
915 Assignment<ActualDstTypeCleaned,Src,Func>::run(actualDst, src, func);
918 template<
typename Dst,
typename Src,
typename Func>
922 typedef evaluator<Dst> DstEvaluatorType;
923 typedef evaluator<Src> SrcEvaluatorType;
924 typedef restricted_packet_dense_assignment_kernel<DstEvaluatorType,SrcEvaluatorType,Func> Kernel;
929 SrcEvaluatorType srcEvaluator(src);
932 DstEvaluatorType dstEvaluator(dst);
933 Kernel kernel(dstEvaluator, srcEvaluator, func, dst.const_cast_derived());
935 dense_assignment_loop<Kernel>::run(kernel);
938 template<
typename Dst,
typename Src>
945 template<
typename Dst,
typename Src,
typename Func>
954 Assignment<Dst,Src,Func>::run(dst, src, func);
956 template<
typename Dst,
typename Src>
969 template<
typename DstXprType,
typename SrcXprType,
typename Functor,
typename Weak>
970 struct Assignment<DstXprType, SrcXprType, Functor, Dense2Dense, Weak>
973 static EIGEN_STRONG_INLINE
void run(DstXprType &dst,
const SrcXprType &src,
const Functor &func)
975 #ifndef EIGEN_NO_DEBUG
987 template<
typename DstXprType,
typename SrcXprType,
typename Functor,
typename Weak>
988 struct Assignment<DstXprType, SrcXprType, Functor, EigenBase2EigenBase, Weak>
991 static EIGEN_STRONG_INLINE
void run(DstXprType &dst,
const SrcXprType &src,
const internal::assign_op<typename DstXprType::Scalar,typename SrcXprType::Scalar> &)
993 Index dstRows = src.rows();
994 Index dstCols = src.cols();
995 if((dst.rows()!=dstRows) || (dst.cols()!=dstCols))
996 dst.resize(dstRows, dstCols);
998 eigen_assert(dst.rows() == src.rows() && dst.cols() == src.cols());
1004 template<
typename SrcScalarType>
1006 static EIGEN_STRONG_INLINE
void run(DstXprType &dst,
const SrcXprType &src,
const internal::add_assign_op<typename DstXprType::Scalar,SrcScalarType> &)
1008 Index dstRows = src.rows();
1009 Index dstCols = src.cols();
1010 if((dst.rows()!=dstRows) || (dst.cols()!=dstCols))
1011 dst.resize(dstRows, dstCols);
1013 eigen_assert(dst.rows() == src.rows() && dst.cols() == src.cols());
1017 template<
typename SrcScalarType>
1019 static EIGEN_STRONG_INLINE
void run(DstXprType &dst,
const SrcXprType &src,
const internal::sub_assign_op<typename DstXprType::Scalar,SrcScalarType> &)
1021 Index dstRows = src.rows();
1022 Index dstCols = src.cols();
1023 if((dst.rows()!=dstRows) || (dst.cols()!=dstCols))
1024 dst.resize(dstRows, dstCols);
1026 eigen_assert(dst.rows() == src.rows() && dst.cols() == src.cols());
RowXpr row(Index i)
This is the const version of row(). */.
ColXpr col(Index i)
This is the const version of col().
#define EIGEN_DEBUG_VAR(x)
#define EIGEN_DEVICE_FUNC
#define EIGEN_DONT_INLINE
#define EIGEN_ONLY_USED_FOR_DEBUG(x)
#define EIGEN_UNROLLING_LIMIT
#define EIGEN_STATIC_ASSERT_LVALUE(Derived)
#define EIGEN_STATIC_ASSERT(X, MSG)
#define EIGEN_STATIC_ASSERT_SAME_MATRIX_SIZE(TYPE0, TYPE1)
#define EIGEN_CHECK_BINARY_COMPATIBILIY(BINOP, LHS, RHS)
Generic expression of a matrix where all coefficients are defined by a functor.
Pseudo expression providing an operator = assuming no aliasing.
ExpressionType & expression() const
static const lastp1_t end
const unsigned int ActualPacketAccessBit
const unsigned int LinearAccessBit
const unsigned int DirectAccessBit
const unsigned int RowMajorBit
constexpr int plain_enum_min(A a, B b)
EIGEN_CONSTEXPR void call_assignment_no_alias_no_transpose(Dst &dst, const Src &src, const Func &func)
void check_for_aliasing(const Dst &dst, const Src &src)
void resize_if_allowed(DstXprType &dst, const SrcXprType &src, const Functor &)
void call_restricted_packet_assignment_no_alias(Dst &dst, const Src &src, const Func &func)
EIGEN_CONSTEXPR void call_dense_assignment_loop(DstXprType &dst, const SrcXprType &src, const Functor &func)
constexpr int min_size_prefer_fixed(A a, B b)
void call_assignment(Dst &dst, const Src &src)
EIGEN_CONSTEXPR void call_assignment_no_alias(Dst &dst, const Src &src, const Func &func)
EIGEN_ALWAYS_INLINE T mini(const T &x, const T &y)
@ InnerVectorizedTraversal
@ LinearVectorizedTraversal
@ SliceVectorizedTraversal
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index
The Index type as used for the API.