AmbiVector.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// Eigen is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 3 of the License, or (at your option) any later version.
//
// Alternatively, you can redistribute it and/or
// modify it under the terms of the GNU General Public License as
// published by the Free Software Foundation; either version 2 of
// the License, or (at your option) any later version.
//
// Eigen is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License or the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License and a copy of the GNU General Public License along with
// Eigen. If not, see <http://www.gnu.org/licenses/>.

#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H

namespace Eigen {

namespace internal {

/** \internal
  * Hybrid sparse/dense vector class designed for intensive read-write operations.
  *
  * See BasicSparseLLT and SparseProduct for usage examples.
  */
template<typename _Scalar, typename _Index>
class AmbiVector
{
  public:
    typedef _Scalar Scalar;
    typedef _Index Index;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    AmbiVector(Index size)
      : m_buffer(0), m_zero(0), m_size(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
    {
      resize(size);
    }

    void init(double estimatedDensity);
    void init(int mode);

    Index nonZeros() const;

    /** Specifies a sub-vector to work on */
    void setBounds(Index start, Index end) { m_start = start; m_end = end; }

    void setZero();

    void restart();
    Scalar& coeffRef(Index i);
    Scalar& coeff(Index i);

    class Iterator;

    ~AmbiVector() { delete[] m_buffer; }

    void resize(Index size)
    {
      if (m_allocatedSize < size)
        reallocate(size);
      m_size = size;
    }

    Index size() const { return m_size; }

  protected:

    void reallocate(Index size)
    {
      // if the size of the matrix is not too large, let's allocate a bit more than needed such
      // that we can handle dense vectors even in sparse mode.
      delete[] m_buffer;
      if (size<1000)
      {
        // round up so that allocSize Scalars provide room for at least size ListEl entries
        Index allocSize = (size * sizeof(ListEl) + sizeof(Scalar) - 1)/sizeof(Scalar);
        m_allocatedElements = (allocSize*sizeof(Scalar))/sizeof(ListEl);
        m_buffer = new Scalar[allocSize];
        m_allocatedSize = allocSize; // track the capacity so that resize() can skip redundant reallocations
      }
      else
      {
        m_allocatedElements = (size*sizeof(Scalar))/sizeof(ListEl);
        m_buffer = new Scalar[size];
        m_allocatedSize = size;
      }
      m_size = size;
      m_start = 0;
      m_end = m_size;
    }

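    // Worked example of the over-allocation above (illustrative figures, assuming
    // Scalar=double and Index=int, hence sizeof(Scalar)==8 and sizeof(ListEl)==16):
    // reallocate(100) takes the small-size branch and allocates allocSize==200
    // Scalars. That same 1600-byte buffer holds either 100 ListEl nodes in sparse
    // mode or the 100 coefficients of a dense vector, which is what allows init()
    // to switch representations without touching the allocation.
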
    void reallocateSparse()
    {
      Index copyElements = m_allocatedElements;
      // grow the linked-list storage by 50%, but never beyond one entry per coefficient
      m_allocatedElements = (std::min)(Index(m_allocatedElements*1.5),m_size);
      // convert the element count to a number of Scalars, rounding up
      Index allocSize = m_allocatedElements * sizeof(ListEl);
      allocSize = allocSize/sizeof(Scalar) + (allocSize%sizeof(Scalar)>0?1:0);
      Scalar* newBuffer = new Scalar[allocSize];
      memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
      delete[] m_buffer;
      m_buffer = newBuffer;
      m_allocatedSize = allocSize;
    }

  protected:
    // element type of the linked list
    struct ListEl
    {
      Index next;
      Index index;
      Scalar value;
    };

    // storage shared by both the dense and the sparse representations
    Scalar* m_buffer;
    Scalar m_zero;
    Index m_size;
    Index m_start;
    Index m_end;
    Index m_allocatedSize;     // capacity of m_buffer, in Scalars
    Index m_allocatedElements; // capacity of m_buffer, in ListEl entries (sparse mode)
    Index m_mode;

    // linked list mode
    Index m_llStart;
    Index m_llCurrent;
    Index m_llSize;
};
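
// A minimal usage sketch (illustrative only; the identifier res_rows and the
// literal density are hypothetical, not taken from Eigen's kernels). It shows
// the fill-then-read cycle the class is designed for: choose a representation
// from an estimated density, write coefficients in increasing index order, then
// read the non-zeros back through the Iterator defined at the end of this file.
//
//   AmbiVector<double,int> tmp(res_rows); // accumulator for one result column
//   tmp.init(0.01);                       // ~1% density => linked-list mode
//   tmp.setZero();
//   tmp.restart();                        // begin an in-order sweep
//   tmp.coeffRef(2) += 0.5;
//   tmp.coeffRef(7) += 1.5;               // indices must not decrease in a sweep
//   for (AmbiVector<double,int>::Iterator it(tmp); it; ++it)
//     std::cout << it.index() << " -> " << it.value() << "\n";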

/** \returns the number of non zeros in the current sub vector */
template<typename _Scalar,typename _Index>
_Index AmbiVector<_Scalar,_Index>::nonZeros() const
{
  if (m_mode==IsSparse)
    return m_llSize;
  else
    return m_end - m_start;
}

template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::init(double estimatedDensity)
{
  if (estimatedDensity>0.1)
    init(IsDense);
  else
    init(IsSparse);
}

template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::init(int mode)
{
  m_mode = mode;
  if (m_mode==IsSparse)
  {
    m_llSize = 0;
    m_llStart = -1;
  }
}
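
// For instance, a caller expecting about nnz non-zeros out of n coefficients
// would pick the mode like this (a sketch; nnz and n are hypothetical names):
//
//   AmbiVector<Scalar,Index> v(n);
//   v.init(double(nnz)/double(n)); // > 0.1 selects dense mode, otherwise sparse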

/** Must be called whenever we might perform a write access
  * with an index smaller than the previous one.
  *
  * Don't worry, this function is extremely cheap.
  */
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::restart()
{
  m_llCurrent = m_llStart;
}

/** Set all coefficients of current subvector to zero */
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::setZero()
{
  if (m_mode==IsDense)
  {
    for (Index i=m_start; i<m_end; ++i)
      m_buffer[i] = Scalar(0);
  }
  else
  {
    eigen_assert(m_mode==IsSparse);
    m_llSize = 0;
    m_llStart = -1;
  }
}

template<typename _Scalar,typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeffRef(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    // TODO factorize the following code to reduce code generation
    eigen_assert(m_mode==IsSparse);
    if (m_llSize==0)
    {
      // this is the first element
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = i;
      llElements[0].next = -1;
      return llElements[0].value;
    }
    else if (i<llElements[m_llStart].index)
    {
      // make sure there is room for the new element before writing it
      if (m_llSize>=m_allocatedElements)
      {
        reallocateSparse();
        llElements = reinterpret_cast<ListEl*>(m_buffer);
      }
      // this is going to be the new first element of the list
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = i;
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    }
    else
    {
      Index nextel = llElements[m_llCurrent].next;
      eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
      while (nextel >= 0 && llElements[nextel].index<=i)
      {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }

      if (llElements[m_llCurrent].index==i)
      {
        // the coefficient already exists and we found it!
        return llElements[m_llCurrent].value;
      }
      else
      {
        if (m_llSize>=m_allocatedElements)
        {
          reallocateSparse();
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        eigen_internal_assert(m_llSize<m_allocatedElements && "internal error: overflow in sparse mode");
        // let's insert a new coefficient
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = i;
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
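
// Sketch of the access protocol coeffRef() assumes (illustrative, not from
// Eigen's documentation): within one sweep the requested indices may only
// increase, because the search resumes from m_llCurrent; before touching a
// lower index again, a new sweep must be announced with restart().
//
//   v.restart();
//   v.coeffRef(1) += x; // ok: first access of the sweep
//   v.coeffRef(5) += y; // ok: 5 >= 1, still the same sweep
//   v.restart();        // required before going back to a lower index
//   v.coeffRef(3) += z;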

template<typename _Scalar,typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeff(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if ((m_llSize==0) || (i<llElements[m_llStart].index))
    {
      return m_zero;
    }
    else
    {
      Index elid = m_llStart;
      while (elid >= 0 && llElements[elid].index<i)
        elid = llElements[elid].next;

      // return the stored value only if the traversal stopped exactly at index i
      if (elid >= 0 && llElements[elid].index==i)
        return llElements[elid].value;
      else
        return m_zero;
    }
  }
}

// iterator over the non-zero coefficients of an AmbiVector
template<typename _Scalar,typename _Index>
class AmbiVector<_Scalar,_Index>::Iterator
{
  public:
    typedef _Scalar Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    /** Default constructor
      * \param vec the vector on which we iterate
      * \param epsilon the minimal value used to prune zero coefficients.
      * In practice, all coefficients having a magnitude smaller than \a epsilon
      * are skipped.
      */
    Iterator(const AmbiVector& vec, RealScalar epsilon = 0)
      : m_vector(vec)
    {
      m_epsilon = epsilon;
      m_isDense = m_vector.m_mode==IsDense;
      if (m_isDense)
      {
        m_currentEl = 0;   // this is to avoid a compilation warning
        m_cachedValue = 0; // this is to avoid a compilation warning
        m_cachedIndex = m_vector.m_start-1;
        ++(*this);
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        // skip the leading elements pruned by epsilon
        m_currentEl = m_vector.m_llStart;
        while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<=m_epsilon)
          m_currentEl = llElements[m_currentEl].next;
        if (m_currentEl<0)
        {
          m_cachedValue = 0; // this is to avoid a compilation warning
          m_cachedIndex = -1;
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
    }

    Index index() const { return m_cachedIndex; }
    Scalar value() const { return m_cachedValue; }

    operator bool() const { return m_cachedIndex>=0; }

    Iterator& operator++()
    {
      if (m_isDense)
      {
        do {
          ++m_cachedIndex;
        } while (m_cachedIndex<m_vector.m_end && internal::abs(m_vector.m_buffer[m_cachedIndex])<m_epsilon);
        if (m_cachedIndex<m_vector.m_end)
          m_cachedValue = m_vector.m_buffer[m_cachedIndex];
        else
          m_cachedIndex=-1;
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        do {
          m_currentEl = llElements[m_currentEl].next;
        } while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<=m_epsilon); // <= keeps the pruning consistent with the constructor
        if (m_currentEl<0)
        {
          m_cachedIndex = -1;
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
      return *this;
    }

  protected:
    const AmbiVector& m_vector; // the target vector
    Index m_currentEl;          // the current element in sparse/linked-list mode
    RealScalar m_epsilon;       // epsilon used to prune zero coefficients
    Index m_cachedIndex;        // current coordinate
    Scalar m_cachedValue;       // current value
    bool m_isDense;             // mode of the vector
};
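
// Illustrative use of the Iterator with pruning (a sketch; `vec` and `out` are
// hypothetical names): a strictly positive epsilon drops coefficients whose
// magnitude is below it, which is how callers can discard numerical noise
// while copying the accumulated values into their final storage.
//
//   typedef AmbiVector<double,int> AV;
//   double eps = 1e-12;
//   for (AV::Iterator it(vec, eps); it; ++it)
//     out.push_back(std::make_pair(it.index(), it.value()));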

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_AMBIVECTOR_H