// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {
/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */
template<typename _Scalar,typename _StorageIndex>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _StorageIndex StorageIndex;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    explicit CompressedStorage(Index size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      if(other.size()>0)
      {
        internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
        internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      }
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

    void reserve(Index size)
    {
      Index newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    void resize(Index size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
      {
        Index realloc_size = (std::min<Index>)(NumTraits<StorageIndex>::highest(), size + Index(reserveSizeFactor*double(size)));
        if(realloc_size<size)
          internal::throw_std_bad_alloc();
        reallocate(realloc_size);
      }
      m_size = size;
    }
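
    // For example, append() below calls resize(m_size+1, 1): the factor 1
    // requests 2*(m_size+1) entries, doubling capacity to amortize repeated
    // one-element growth. The min() against NumTraits<StorageIndex>::highest()
    // caps the request at the largest representable StorageIndex; if even the
    // capped size cannot hold the requested elements, throw_std_bad_alloc()
    // signals the failure.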

    void append(const Scalar& v, Index i)
    {
      Index id = m_size;
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = internal::convert_index<StorageIndex>(i);
    }
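
    // Caveat (illustrative): append() performs no sorting; callers are expected
    // to append coefficients with increasing indices, e.g. append(a,1),
    // append(b,4), append(c,7), so that the binary searches below remain valid.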

    inline Index size() const { return m_size; }
    inline Index allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    inline Scalar& value(Index i) { return m_values[i]; }
    inline const Scalar& value(Index i) const { return m_values[i]; }

    inline StorageIndex& index(Index i) { return m_indices[i]; }
    inline const StorageIndex& index(Index i) const { return m_indices[i]; }

    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(Index start, Index end, Index key) const
    {
      while(end>start)
      {
        Index mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return start;
    }
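
    // Example (illustrative): with stored indices {1,4,7},
    //   searchLowerIndex(1) == 0  (exact match),
    //   searchLowerIndex(5) == 2  (first entry with index >= 5),
    //   searchLowerIndex(9) == 3  (== m_size, i.e. past-the-end),
    // i.e. a classic lower-bound binary search on m_indices.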

    /** \returns the stored value at index \a key
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(Index start, Index end, Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }
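
    // Example (illustrative): with indices {1,4,7} and values {a,b,c},
    //   at(4) == b, at(7) == c (via the fast last-coefficient path),
    //   and at(5) returns defaultValue without modifying the array.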

    /** \returns a reference to the value at index \a key
      * If the value does not exist, then the value \a defaultValue is inserted
      * into the sparse array. The array must be sorted. */
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      Index id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        if (m_allocatedSize<m_size+1)
        {
          m_allocatedSize = 2*(m_size+1);
          internal::scoped_array<Scalar> newValues(m_allocatedSize);
          internal::scoped_array<StorageIndex> newIndices(m_allocatedSize);

          // copy first chunk
          internal::smart_copy(m_values,  m_values +id, newValues.ptr());
          internal::smart_copy(m_indices, m_indices+id, newIndices.ptr());

          // copy the rest
          if(m_size>id)
          {
            internal::smart_copy(m_values +id,  m_values +m_size, newValues.ptr() +id+1);
            internal::smart_copy(m_indices+id,  m_indices+m_size, newIndices.ptr()+id+1);
          }
          std::swap(m_values,newValues.ptr());
          std::swap(m_indices,newIndices.ptr());
        }
        else if(m_size>id)
        {
          internal::smart_memmove(m_values +id,  m_values +m_size, m_values +id+1);
          internal::smart_memmove(m_indices+id, m_indices+m_size, m_indices+id+1);
        }
        m_size++;
        m_indices[id] = internal::convert_index<StorageIndex>(key);
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
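
    // Example (illustrative): with indices {1,4,7}, atWithInsertion(5)
    // shifts the tail {7} one slot right (reallocating to 2*(m_size+1)
    // entries if the array is full), stores index 5 at position 2, and
    // returns a reference to the new value, leaving indices {1,4,5,7}.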

    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      Index k = 0;
      Index n = size();
      for (Index i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }
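
    // Example (illustrative): with values {1e-20, 2.0, 3.0} and reference 1.0,
    // prune(1.0) drops the first coefficient (much smaller than the reference
    // w.r.t. epsilon), compacts the surviving value/index pairs in place, and
    // shrinks m_size to 2 without touching the allocation.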

  protected:

    inline void reallocate(Index size)
    {
      #ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
        EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
      #endif
      eigen_internal_assert(size!=m_allocatedSize);
      internal::scoped_array<Scalar> newValues(size);
      internal::scoped_array<StorageIndex> newIndices(size);
      Index copySize = (std::min)(size, m_size);
      if (copySize>0) {
        internal::smart_copy(m_values, m_values+copySize, newValues.ptr());
        internal::smart_copy(m_indices, m_indices+copySize, newIndices.ptr());
      }
      std::swap(m_values,newValues.ptr());
      std::swap(m_indices,newIndices.ptr());
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    StorageIndex* m_indices;
    Index m_size;
    Index m_allocatedSize;

};
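
// Usage sketch (illustrative; CompressedStorage is an internal helper behind
// SparseMatrix and SparseVector, so calling it directly as below is an
// assumption for exposition, not documented public API):
//
//   internal::CompressedStorage<double,int> s;
//   s.append(1.0, 0);            // indices must arrive in increasing order
//   s.append(2.5, 3);
//   double v = s.at(3);          // v == 2.5; s.at(2) == 0.0 (default)
//   s.atWithInsertion(1) = 4.0;  // inserts index 1, keeps indices sorted
//   s.prune(1.0);                // drops coefficients much smaller than 1.0
//   s.squeeze();                 // trims capacity down to size()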

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H