#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H

namespace Eigen {
namespace internal {
/** \internal
  * Hybrid sparse/dense vector class: the coefficients are stored either in a
  * plain dense array or in a sorted singly linked list, depending on the mode
  * selected via init().
  */
template<typename Scalar_, typename StorageIndex_>
class AmbiVector
{
  public:
    typedef Scalar_ Scalar;
    typedef StorageIndex_ StorageIndex;

    explicit AmbiVector(Index size)
      : m_buffer(0), m_zero(0), m_size(0), m_end(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
    {
      resize(size);
    }
    void init(double estimatedDensity);
    void init(int mode);

    Index nonZeros() const;

    /** Specifies a sub-vector to work on */
    void setBounds(Index start, Index end) { m_start = convert_index(start); m_end = convert_index(end); }

    void setZero();
    void restart();

    Scalar& coeffRef(Index i);
    Scalar& coeff(Index i);

    class Iterator;
    ~AmbiVector() { delete[] m_buffer; }
    void resize(Index size)
    {
      if (m_allocatedSize < size)
        reallocate(size);
      m_size = convert_index(size);
    }
    StorageIndex size() const { return m_size; }

  protected:
    StorageIndex convert_index(Index idx)
    {
      return internal::convert_index<StorageIndex>(idx);
    }
    void reallocate(Index size)
    {
      // for small vectors, allocate a bit more than needed so that the same
      // buffer can hold a dense vector even when we are in sparse mode
      delete[] m_buffer;
      if (size < 1000)
      {
        Index allocSize = (size * sizeof(ListEl) + sizeof(Scalar) - 1) / sizeof(Scalar);
        m_allocatedElements = convert_index((allocSize * sizeof(Scalar)) / sizeof(ListEl));
        m_buffer = new Scalar[allocSize];
      }
      else
      {
        m_allocatedElements = convert_index((size * sizeof(Scalar)) / sizeof(ListEl));
        m_buffer = new Scalar[size];
      }
      // record the new capacity so that resize() can skip useless reallocations
      m_allocatedSize = convert_index(size);
      m_size = convert_index(size);
    }
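    // Worked example of the sizing arithmetic above (illustrative numbers,
    // assuming Scalar = double and StorageIndex = int): ListEl then occupies
    // 16 bytes (two 4-byte indices plus one 8-byte value), so for size = 100
    // the small-size branch requests
    //   allocSize = (100*16 + 8 - 1)/8 = 200 Scalars (1600 bytes),
    // which holds exactly 100 linked-list elements in sparse mode, or 100
    // doubles in dense mode with room to spare.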
    // grows the sparse-mode storage by roughly 1.5x, preserving the elements
    // already inserted in the linked list
    void reallocateSparse()
    {
      Index copyElements = m_allocatedElements;
      m_allocatedElements = (std::min)(StorageIndex(m_allocatedElements*1.5), m_size);
      Index allocSize = m_allocatedElements * sizeof(ListEl);
      allocSize = (allocSize + sizeof(Scalar) - 1) / sizeof(Scalar);
      Scalar* newBuffer = new Scalar[allocSize];
      std::memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
      delete[] m_buffer; // release the previous buffer
      m_buffer = newBuffer;
    }
  protected:
    // element type of the linked list used in sparse mode
    struct ListEl { StorageIndex next; StorageIndex index; Scalar value; };

    // storage shared by both modes
    Scalar* m_buffer;
    Scalar m_zero;
    StorageIndex m_size;
    StorageIndex m_start;
    StorageIndex m_end;
    StorageIndex m_allocatedSize;
    StorageIndex m_allocatedElements;
    StorageIndex m_mode;

    // linked-list mode state
    StorageIndex m_llStart;
    StorageIndex m_llCurrent;
    StorageIndex m_llSize;
};
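// A minimal usage sketch (illustrative, not part of the original header):
// AmbiVector is an internal helper of the sparse module, and IsDense/IsSparse
// are assumed to be the mode constants consumed by init().
//
//   internal::AmbiVector<double,int> vec(8);
//   vec.init(0.05);              // estimated density of 5% -> sparse mode
//   vec.setBounds(0, vec.size());
//   vec.setZero();
//   vec.restart();
//   vec.coeffRef(1) = 2.0;       // writes with increasing indices
//   vec.coeffRef(5) = 3.0;
//   Index nnz = vec.nonZeros();  // == 2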
/** \returns the number of non zeros in the current sub vector */
template<typename Scalar_, typename StorageIndex_>
Index AmbiVector<Scalar_,StorageIndex_>::nonZeros() const
{
  if (m_mode==IsSparse)
    return m_llSize;
  else
    return m_end - m_start;
}
template<typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_,StorageIndex_>::init(double estimatedDensity)
{
  // a dense representation pays off once roughly 10% of the coefficients
  // are expected to be non-zero
  if (estimatedDensity>0.1)
    init(IsDense);
  else
    init(IsSparse);
}
template<typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_,StorageIndex_>::init(int mode)
{
  m_mode = mode;
  // reset the linked list (only meaningful in sparse mode, but cheap to do
  // unconditionally)
  m_llSize = 0;
  m_llStart = -1;
}
/** Must be called whenever we might perform a write access
  * with an index smaller than the previous one.
  *
  * Don't worry, this function is extremely cheap.
  */
template<typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_,StorageIndex_>::restart()
{
  m_llCurrent = m_llStart;
}
/** Sets all coefficients of the current sub-vector to zero */
template<typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_,StorageIndex_>::setZero()
{
  if (m_mode==IsDense)
  {
    for (Index i=m_start; i<m_end; ++i)
      m_buffer[i] = Scalar(0);
  }
  else
  {
    eigen_assert(m_mode==IsSparse);
    m_llSize = 0;
    m_llStart = -1;
  }
}
template<typename Scalar_, typename StorageIndex_>
Scalar_& AmbiVector<Scalar_,StorageIndex_>::coeffRef(Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if (m_llSize==0)
    {
      // this is the very first inserted element
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = convert_index(i);
      llElements[0].next = -1;
      return llElements[0].value;
    }
    else if (i<llElements[m_llStart].index)
    {
      // the new element becomes the head of the list
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = convert_index(i);
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    }
    else
    {
      StorageIndex nextel = llElements[m_llCurrent].next;
      eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
      while (nextel >= 0 && llElements[nextel].index<=i)
      {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }

      if (llElements[m_llCurrent].index==i)
      {
        // the coefficient already exists and we found it
        return llElements[m_llCurrent].value;
      }
      else
      {
        if (m_llSize>=m_allocatedElements)
        {
          reallocateSparse();
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        // insert a new coefficient right after m_llCurrent
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = convert_index(i);
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
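// Sketch of the access pattern coeffRef() expects in sparse mode: between two
// calls to restart(), write indices must be non-decreasing, because the
// search starts at m_llCurrent rather than at the head of the list.
//
//   v.restart();
//   v.coeffRef(2) += 1;  // ok
//   v.coeffRef(7) += 1;  // ok: 7 >= 2
//   v.restart();         // mandatory before revisiting a smaller index
//   v.coeffRef(3) += 1;  // ok again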
template<typename Scalar_, typename StorageIndex_>
Scalar_& AmbiVector<Scalar_,StorageIndex_>::coeff(Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if ((m_llSize==0) || (i<llElements[m_llStart].index))
    {
      // empty list, or i lies before the first stored element
      return m_zero;
    }
    else
    {
      Index elid = m_llStart;
      while (elid >= 0 && llElements[elid].index<i)
        elid = llElements[elid].next;

      if (llElements[elid].index==i)
        return llElements[elid].value;
      else
        return m_zero;
    }
  }
}
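// Illustrative contrast between the two accessors: coeff() is a read-only
// lookup that returns a reference to the shared m_zero for absent entries,
// while coeffRef() inserts a zero-initialized element when needed.
//
//   Scalar a = v.coeff(4);  // 0 if index 4 was never written; no insertion
//   v.coeffRef(4) = 1.0;    // inserts index 4 into the linked list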
/** \internal
  * An STL-like iterator over the "non-zero" coefficients of an AmbiVector,
  * i.e., the coefficients whose magnitude is greater than a given epsilon.
  */
template<typename Scalar_, typename StorageIndex_>
class AmbiVector<Scalar_,StorageIndex_>::Iterator
{
  public:
    typedef Scalar_ Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    /** \param vec the vector on which we iterate
      * \param epsilon the minimal magnitude for a coefficient to be considered non-zero
      */
    explicit Iterator(const AmbiVector& vec, const RealScalar& epsilon = 0)
      : m_vector(vec)
    {
      using std::abs;
      m_epsilon = epsilon;
      m_isDense = m_vector.m_mode==IsDense;
      if (m_isDense)
      {
        m_currentEl = 0;   // unused in dense mode; set only to silence warnings
        m_cachedValue = 0; // idem
        m_cachedIndex = m_vector.m_start-1;
        ++(*this);
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        // skip leading elements that epsilon-prune to zero
        m_currentEl = m_vector.m_llStart;
        while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<=m_epsilon)
          m_currentEl = llElements[m_currentEl].next;
        if (m_currentEl<0)
        {
          m_cachedValue = 0;
          m_cachedIndex = -1; // the iterator is already done
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
    }
    StorageIndex index() const { return m_cachedIndex; }
    Scalar value() const { return m_cachedValue; }

    operator bool() const { return m_cachedIndex>=0; }
    Iterator& operator++()
    {
      using std::abs;
      if (m_isDense)
      {
        do {
          ++m_cachedIndex;
        } while (m_cachedIndex<m_vector.m_end && abs(m_vector.m_buffer[m_cachedIndex])<=m_epsilon);
        if (m_cachedIndex<m_vector.m_end)
          m_cachedValue = m_vector.m_buffer[m_cachedIndex];
        else
          m_cachedIndex = -1; // past the end: mark the iterator as done
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        do {
          m_currentEl = llElements[m_currentEl].next;
        } while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<=m_epsilon);
        if (m_currentEl<0)
        {
          m_cachedIndex = -1; // reached the end of the list
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
      return *this;
    }
  protected:
    const AmbiVector& m_vector; // the target vector
    StorageIndex m_currentEl;   // the current element in sparse/linked-list mode
    RealScalar m_epsilon;       // epsilon used to prune zero coefficients
    StorageIndex m_cachedIndex; // current coordinate
    Scalar m_cachedValue;       // current value
    bool m_isDense;             // whether the underlying vector is in dense mode
};
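// Illustrative use of the epsilon parameter (a sketch, mirroring how the
// sparse kernels consume this iterator): coefficients whose magnitude is
// <= 0.5 are skipped as if they were exactly zero.
//
//   for (internal::AmbiVector<double,int>::Iterator it(vec, 0.5); it; ++it)
//     std::cout << it.index() << " -> " << it.value() << "\n";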
} // end namespace internal
} // end namespace Eigen

#endif // EIGEN_AMBIVECTOR_H