CompressedStorage.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

#include "./InternalHeaderCheck.h"

namespace Eigen {

namespace internal {

/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */
template<typename Scalar_,typename StorageIndex_>
class CompressedStorage
{
  public:

    typedef Scalar_ Scalar;
    typedef StorageIndex_ StorageIndex;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    explicit CompressedStorage(Index size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      if(other.size()>0)
      {
        internal::smart_copy(other.m_values, other.m_values + m_size, m_values);
        internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      }
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      conditional_aligned_delete_auto<Scalar, true>(m_values, m_allocatedSize);
      conditional_aligned_delete_auto<StorageIndex, true>(m_indices, m_allocatedSize);
    }

    void reserve(Index size)
    {
      Index newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }
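
    // Note that, unlike std::vector::reserve, the argument of reserve() above is the
    // number of *additional* slots to guarantee on top of the current size, not a total
    // capacity. A minimal usage sketch (hypothetical sizes):
    //   CompressedStorage<double,int> s;
    //   s.resize(8);   // size()==8, allocatedSize()>=8
    //   s.reserve(4);  // guarantees allocatedSize()>=12; size() stays 8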

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    void resize(Index size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
      {
        Index realloc_size = (std::min<Index>)(NumTraits<StorageIndex>::highest(), size + Index(reserveSizeFactor*double(size)));
        if(realloc_size<size)
          internal::throw_std_bad_alloc();
        reallocate(realloc_size);
      }
      m_size = size;
    }
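
    // resize() above only reallocates when growing past the current capacity;
    // reserveSizeFactor requests proportional slack so that repeated growth is
    // amortized. A minimal sketch (hypothetical sizes):
    //   CompressedStorage<double,int> s;
    //   s.resize(100, 0.5);  // size()==100, allocatedSize()==150 (50% headroom)
    //   s.resize(120, 0.5);  // fits in the slack: no reallocation, size()==120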

    void append(const Scalar& v, Index i)
    {
      Index id = m_size;
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = internal::convert_index<StorageIndex>(i);
    }
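
    // append() does not sort: the binary searches below assume the index array stays
    // sorted, so callers are expected to append in increasing index order. A minimal
    // sketch storing the pairs (index 1, 0.5) and (index 4, 2.0):
    //   CompressedStorage<double,int> s;
    //   s.append(0.5, 1);
    //   s.append(2.0, 4);  // indices remain sorted by construction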

    inline Index size() const { return m_size; }
    inline Index allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    const Scalar* valuePtr() const { return m_values; }
    Scalar* valuePtr() { return m_values; }
    const StorageIndex* indexPtr() const { return m_indices; }
    StorageIndex* indexPtr() { return m_indices; }

    inline Scalar& value(Index i) { eigen_internal_assert(m_values!=0); return m_values[i]; }
    inline const Scalar& value(Index i) const { eigen_internal_assert(m_values!=0); return m_values[i]; }

    inline StorageIndex& index(Index i) { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
    inline const StorageIndex& index(Index i) const { eigen_internal_assert(m_indices!=0); return m_indices[i]; }

    /** \returns the largest \c k such that for all \c j in [0,k) index[j]\<key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[j]\<key */
    inline Index searchLowerIndex(Index start, Index end, Index key) const
    {
      return static_cast<Index>(std::distance(m_indices, std::lower_bound(m_indices + start, m_indices + end, key)));
    }
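
    // searchLowerIndex() is a plain lower bound over the sorted index array: it returns
    // the first position whose stored index is >= key, or the end of the searched range.
    // A minimal sketch, assuming the stored indices are {1,4,7}:
    //   s.searchLowerIndex(4);  // returns 1 (exact hit)
    //   s.searchLowerIndex(5);  // returns 2 (where 5 would be inserted)
    //   s.searchLowerIndex(9);  // returns 3 (== size(), past the end)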

    /** \returns the stored value at index \c key
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }
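
    // A minimal sketch of at(), reusing the hypothetical stored indices {1,4,7}:
    //   s.at(4);        // key present: returns the value stored at index 4
    //   s.at(3);        // key absent: returns Scalar(0)
    //   s.at(3, -1.0);  // key absent: returns the explicit default -1.0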

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(Index start, Index end, Index key, const Scalar &defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index \c key
      * If the value does not exist, then the value \a defaultValue is inserted
      * such that the keys are sorted. */
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      Index id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        if (m_allocatedSize<m_size+1)
        {
          Index newAllocatedSize = 2 * (m_size + 1);
          m_values = conditional_aligned_realloc_new_auto<Scalar, true>(m_values, newAllocatedSize, m_allocatedSize);
          m_indices = conditional_aligned_realloc_new_auto<StorageIndex, true>(m_indices, newAllocatedSize, m_allocatedSize);
          m_allocatedSize = newAllocatedSize;
        }
        if(m_size>id)
        {
          // shift the tail one slot to the right to make room at position id
          internal::smart_memmove(m_values +id, m_values +m_size, m_values +id+1);
          internal::smart_memmove(m_indices+id, m_indices+m_size, m_indices+id+1);
        }
        m_size++;
        m_indices[id] = internal::convert_index<StorageIndex>(key);
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
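
    // atWithInsertion() is the read-write counterpart of at(): on a miss it inserts
    // (key, defaultValue) at its sorted position, shifting the tail and doubling the
    // capacity if needed, and returns a reference to the slot. A minimal sketch,
    // continuing the hypothetical {1,4,7} example:
    //   s.atWithInsertion(4) += 1.0;  // existing key: updated in place
    //   s.atWithInsertion(3) = 2.5;   // missing key: inserted between indices 1 and 4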

    inline void moveChunk(Index from, Index to, Index chunkSize)
    {
      eigen_internal_assert(chunkSize >= 0 && to+chunkSize <= m_size);
      internal::smart_memmove(m_values + from, m_values + from + chunkSize, m_values + to);
      internal::smart_memmove(m_indices + from, m_indices + from + chunkSize, m_indices + to);
    }
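
    // moveChunk() forwards to memmove-style copies, so the source and destination
    // ranges may overlap. For instance, erasing the element at position k amounts to
    // shifting the tail left and shrinking (a sketch; k is hypothetical):
    //   s.moveChunk(k+1, k, s.size()-k-1);
    //   s.resize(s.size()-1);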

  protected:

    inline void reallocate(Index size)
    {
      #ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
        EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
      #endif
      eigen_internal_assert(size!=m_allocatedSize);
      m_values = conditional_aligned_realloc_new_auto<Scalar, true>(m_values, size, m_allocatedSize);
      m_indices = conditional_aligned_realloc_new_auto<StorageIndex, true>(m_indices, size, m_allocatedSize);
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    StorageIndex* m_indices;
    Index m_size;
    Index m_allocatedSize;

};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H