#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {

/** \internal
 * Stores a sparse set of values as a list of values and a list of indices.
 */
template <typename Scalar_, typename StorageIndex_>
class CompressedStorage {
 public:
  typedef Scalar_ Scalar;
  typedef StorageIndex_ StorageIndex;
  CompressedStorage() : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) {}
  explicit CompressedStorage(Index size) : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) { resize(size); }
  CompressedStorage(const CompressedStorage& other) : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) {
    *this = other;
  }
  CompressedStorage& operator=(const CompressedStorage& other) {
    resize(other.size());
    if (other.size() > 0) {
      internal::smart_copy(other.m_values, other.m_values + m_size, m_values);
      internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
    }
    return *this;
  }
  void swap(CompressedStorage& other) {
    std::swap(m_values, other.m_values);
    std::swap(m_indices, other.m_indices);
    std::swap(m_size, other.m_size);
    std::swap(m_allocatedSize, other.m_allocatedSize);
  }
  ~CompressedStorage() {
    conditional_aligned_delete_auto<Scalar, true>(m_values, m_allocatedSize);
    conditional_aligned_delete_auto<StorageIndex, true>(m_indices, m_allocatedSize);
  }
  void reserve(Index size) {
    Index newAllocatedSize = m_size + size;
    if (newAllocatedSize > m_allocatedSize) reallocate(newAllocatedSize);
  }

  void squeeze() {
    if (m_allocatedSize > m_size) reallocate(m_size);
  }
  void resize(Index size, double reserveSizeFactor = 0) {
    if (m_allocatedSize < size) {
      Index realloc_size =
          (std::min<Index>)(NumTraits<StorageIndex>::highest(), size + Index(reserveSizeFactor * double(size)));
      if (realloc_size < size) internal::throw_std_bad_alloc();
      reallocate(realloc_size);
    }
    m_size = size;
  }
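  // Note: resize() only reallocates when the requested size exceeds the
  // current capacity; it never shrinks the allocation (squeeze() does that).
  // A nonzero reserveSizeFactor over-allocates proportionally, and the result
  // is clamped to the largest value representable by StorageIndex.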
  void append(const Scalar& v, Index i) {
    Index id = m_size;
    resize(m_size + 1, 1);
    m_values[id] = v;
    m_indices[id] = internal::convert_index<StorageIndex>(i);
  }
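  // Note: append() does not check ordering; the search helpers below rely on
  // the indices being sorted, so callers are expected to append keys in
  // increasing order. The reserveSizeFactor of 1 passed to resize() makes the
  // capacity grow geometrically, giving amortized O(1) appends.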
  inline Index size() const { return m_size; }
  inline Index allocatedSize() const { return m_allocatedSize; }
  inline void clear() { m_size = 0; }
  const Scalar* valuePtr() const { return m_values; }
  Scalar* valuePtr() { return m_values; }
  const StorageIndex* indexPtr() const { return m_indices; }
  StorageIndex* indexPtr() { return m_indices; }
  /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */
  inline Index searchLowerIndex(Index key) const { return searchLowerIndex(0, m_size, key); }
  /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\a key */
  inline Index searchLowerIndex(Index start, Index end, Index key) const {
    return static_cast<Index>(std::distance(m_indices, std::lower_bound(m_indices + start, m_indices + end, key)));
  }
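  // Illustrative sketch (not part of the original header): with stored
  // indices {1, 4, 7, 9}, searchLowerIndex(4) == 1, searchLowerIndex(5) == 2,
  // and searchLowerIndex(10) == size(), i.e. the position of the first stored
  // index not smaller than the key, matching std::lower_bound semantics.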
  /** \returns the stored value at index \a key
   * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
  inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const {
    if (m_size == 0)
      return defaultValue;
    else if (key == m_indices[m_size - 1])
      return m_values[m_size - 1];
    // ^^  optimization: let's first check if it is the last coefficient
    // (very common in high level algorithms)
    const Index id = searchLowerIndex(0, m_size - 1, key);
    return ((id < m_size) && (m_indices[id] == key)) ? m_values[id] : defaultValue;
  }
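  // Illustrative sketch (not part of the original header): for a storage
  // holding {(1, 0.5), (4, 2.0)}, at(4) returns 2.0 while at(3) returns the
  // defaultValue (0 unless specified) and leaves the storage unchanged.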
  /** Like at(), but the search is performed in the range [start,end) */
  inline Scalar atInRange(Index start, Index end, Index key, const Scalar& defaultValue = Scalar(0)) const {
    if (start >= end)
      return defaultValue;
    else if (end > start && key == m_indices[end - 1])
      return m_values[end - 1];
    // ^^  optimization: let's first check if it is the last coefficient
    // (very common in high level algorithms)
    const Index id = searchLowerIndex(start, end - 1, key);
    return ((id < end) && (m_indices[id] == key)) ? m_values[id] : defaultValue;
  }
  /** \returns a reference to the value at index \a key
   * If the value does not exist, then the value \a defaultValue is inserted
   * such that the keys are sorted. */
  inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0)) {
    Index id = searchLowerIndex(0, m_size, key);
    if (id >= m_size || m_indices[id] != key) {
      if (m_allocatedSize < m_size + 1) {
        Index newAllocatedSize = 2 * (m_size + 1);
        m_values = conditional_aligned_realloc_new_auto<Scalar, true>(m_values, newAllocatedSize, m_allocatedSize);
        m_indices =
            conditional_aligned_realloc_new_auto<StorageIndex, true>(m_indices, newAllocatedSize, m_allocatedSize);
        m_allocatedSize = newAllocatedSize;
      }
      if (m_size > id) {
        // shift the tail one slot to the right to make room for the new entry
        internal::smart_memmove(m_values + id, m_values + m_size, m_values + id + 1);
        internal::smart_memmove(m_indices + id, m_indices + m_size, m_indices + id + 1);
      }
      m_size++;
      m_indices[id] = internal::convert_index<StorageIndex>(key);
      m_values[id] = defaultValue;
    }
    return m_values[id];
  }
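  // Note: when the arrays are full, capacity grows to 2 * (m_size + 1), so a
  // long run of insertions triggers only O(log n) reallocations; each miss
  // still pays for an O(n) shift of the tail to keep the indices sorted.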
 protected:
  inline void reallocate(Index size) {
#ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
    EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
#endif
    eigen_internal_assert(size != m_allocatedSize);
    m_values = conditional_aligned_realloc_new_auto<Scalar, true>(m_values, size, m_allocatedSize);
    m_indices = conditional_aligned_realloc_new_auto<StorageIndex, true>(m_indices, size, m_allocatedSize);
    m_allocatedSize = size;
  }
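  // Note: EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN follows Eigen's
  // usual plugin convention: if the macro is defined, its contents are pasted
  // at the top of reallocate(), e.g. to instrument or count reallocations.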
 protected:
  Scalar* m_values;
  StorageIndex* m_indices;
  Index m_size;
  Index m_allocatedSize;
};

}  // end namespace internal

}  // end namespace Eigen

#endif  // EIGEN_COMPRESSED_STORAGE_H
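// Illustrative usage sketch (not part of this header; CompressedStorage is an
// internal class and this snippet is hypothetical):
//
//   Eigen::internal::CompressedStorage<double, int> s;
//   s.append(0.5, 1);            // keys must be appended in increasing order
//   s.append(2.0, 4);
//   double x = s.at(4);          // x == 2.0
//   double y = s.at(3);          // y == 0.0 (defaultValue, no insertion)
//   s.atWithInsertion(3) = 7.0;  // inserts key 3, keeping the keys sorted
//   // s.size() is now 3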