#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H
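
// AmbiVector is a small internal helper of Eigen's sparse module: a hybrid
// sparse/dense vector designed for intensive read-write accesses. Depending
// on the estimated density passed to init(), it stores its coefficients
// either as a plain dense array or as a singly linked list of (index, value)
// elements packed into the same scalar buffer.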

template<typename _Scalar, typename _Index>
class AmbiVector
{
  public:
    typedef _Scalar Scalar;
    typedef _Index Index;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    AmbiVector(Index size)
      : m_buffer(0), m_zero(0), m_size(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
    {
      resize(size);
    }

    void init(double estimatedDensity);
    void init(int mode);

    Index nonZeros() const;

    /** Specifies the sub-vector [start, end) to work on */
    void setBounds(Index start, Index end) { m_start = start; m_end = end; }

    void setZero();
    void restart();

    Scalar& coeffRef(Index i);
    Scalar& coeff(Index i);

    class Iterator;

    ~AmbiVector() { delete[] m_buffer; }

    void resize(Index size)
    {
      if (m_allocatedSize < size)
        reallocate(size);
      m_size = size;
    }

    Index size() const { return m_size; }

  protected:

    void reallocate(Index size)
    {
      // if the vector is not too large, allocate a bit more than needed so
      // that the same buffer can hold a dense vector even in sparse mode
      delete[] m_buffer;
      if (size<1000)
      {
        Index allocSize = (size * sizeof(ListEl)) / sizeof(Scalar);
        m_allocatedElements = (allocSize * sizeof(Scalar)) / sizeof(ListEl);
        m_buffer = new Scalar[allocSize];
      }
      else
      {
        m_allocatedElements = (size * sizeof(Scalar)) / sizeof(ListEl);
        m_buffer = new Scalar[size];
      }
      m_size = size;
      m_start = 0;
      m_end = m_size;
    }
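
    // Worked example of the sizing arithmetic above (illustrative numbers,
    // not from the original header): with Scalar = double and Index = int,
    // sizeof(ListEl) is typically 16 bytes, so for size = 100 the small-size
    // branch allocates allocSize = (100*16)/8 = 200 doubles. That buffer can
    // be viewed either as 200 dense scalars (more than the 100 needed) or as
    // m_allocatedElements = (200*8)/16 = 100 linked-list elements, which is
    // why a vector this small can switch to sparse mode without reallocating.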

    void reallocateSparse()
    {
      Index copyElements = m_allocatedElements;
      m_allocatedElements = (std::min)(Index(m_allocatedElements*1.5), m_size);
      Index allocSize = m_allocatedElements * sizeof(ListEl);
      // round up to a whole number of Scalars
      allocSize = allocSize/sizeof(Scalar) + (allocSize%sizeof(Scalar)>0 ? 1 : 0);
      Scalar* newBuffer = new Scalar[allocSize];
      memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
      delete[] m_buffer;
      m_buffer = newBuffer;
    }
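
    // Note (an observation about the code above, not a comment from the
    // original header): growing by a factor of 1.5, capped at m_size (a
    // vector of dimension n never holds more than n nonzeros), keeps the
    // total copying work of repeated insertions amortized linear.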

  protected:
    // element type of the linked list
    struct ListEl
    {
      Index next;
      Index index;
      Scalar value;
    };

    // raw storage, shared by both representations
    Scalar* m_buffer;
    Scalar m_zero;
    Index m_size;
    Index m_start;
    Index m_end;
    Index m_allocatedSize;
    Index m_allocatedElements;
    Index m_mode;

    // linked-list (sparse) mode state
    Index m_llStart;
    Index m_llCurrent;
    Index m_llSize;
};

/** \returns the number of nonzeros in the current sub-vector */
template<typename _Scalar, typename _Index>
_Index AmbiVector<_Scalar,_Index>::nonZeros() const
{
  if (m_mode==IsSparse)
    return m_llSize;
  else
    return m_end - m_start;
}

template<typename _Scalar, typename _Index>
void AmbiVector<_Scalar,_Index>::init(double estimatedDensity)
{
  if (estimatedDensity>0.1)
    init(IsDense);
  else
    init(IsSparse);
}
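
// The 10% density threshold above is a heuristic: above it, a dense buffer
// with O(1) random access is assumed cheaper than maintaining the linked
// list (this explanatory note is not part of the original header).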

template<typename _Scalar, typename _Index>
void AmbiVector<_Scalar,_Index>::init(int mode)
{
  m_mode = mode;
  // in sparse mode, reset the linked list to empty
  if (m_mode==IsSparse)
  {
    m_llSize = 0;
    m_llStart = -1;
  }
}

/** Must be called whenever we might perform a write access
  * with an index smaller than the previous one.
  * This simply rewinds the insertion cursor and is extremely cheap.
  */
template<typename _Scalar, typename _Index>
void AmbiVector<_Scalar,_Index>::restart()
{
  m_llCurrent = m_llStart;
}

/** Sets all coefficients of the current sub-vector to zero */
template<typename _Scalar, typename _Index>
void AmbiVector<_Scalar,_Index>::setZero()
{
  if (m_mode==IsDense)
  {
    for (Index i=m_start; i<m_end; ++i)
      m_buffer[i] = Scalar(0);
  }
  else
  {
    eigen_assert(m_mode==IsSparse);
    m_llSize = 0;
    m_llStart = -1;
  }
}

template<typename _Scalar, typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeffRef(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if (m_llSize==0)
    {
      // the list is empty: insert the very first element
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = i;
      llElements[0].next = -1;
      return llElements[0].value;
    }
    else if (i<llElements[m_llStart].index)
    {
      // the new element becomes the head of the list
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = i;
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    }
    else
    {
      Index nextel = llElements[m_llCurrent].next;
      eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
      while (nextel >= 0 && llElements[nextel].index<=i)
      {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }

      if (llElements[m_llCurrent].index==i)
      {
        // the coefficient already exists: return a reference to it
        return llElements[m_llCurrent].value;
      }
      else
      {
        if (m_llSize>=m_allocatedElements)
        {
          reallocateSparse();
          // the buffer may have moved: refresh the aliasing pointer
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        // insert a new coefficient right after m_llCurrent
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = i;
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
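
// A minimal sketch of the intended sparse-mode write pattern (illustrative
// only, not part of the original header; n and i0 < i1 are placeholders):
//
//   AmbiVector<double,int> v(n);
//   v.init(0.01);            // low estimated density -> linked-list mode
//   v.setZero();
//   v.restart();             // rewind the insertion cursor
//   v.coeffRef(i0) += 1.0;   // writes with non-decreasing indices resume
//   v.coeffRef(i1) += 2.0;   //   the list walk from the cursor; a smaller
//                            //   index requires restart() first, as the
//                            //   assertion in coeffRef() enforces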

template<typename _Scalar, typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeff(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if ((m_llSize==0) || (i<llElements[m_llStart].index))
    {
      // structurally zero coefficient
      return m_zero;
    }
    else
    {
      Index elid = m_llStart;
      while (elid >= 0 && llElements[elid].index<i)
        elid = llElements[elid].next;

      // check elid>=0 first, since the search may run past the end of the
      // list; also return the element found by the search (elid), not the
      // one pointed to by the unrelated insertion cursor m_llCurrent
      if (elid >= 0 && llElements[elid].index==i)
        return llElements[elid].value;
      else
        return m_zero;
    }
  }
}

/** \internal
  * An iterator over the nonzero coefficients of an AmbiVector,
  * skipping every value whose magnitude is below a given epsilon.
  */
template<typename _Scalar, typename _Index>
class AmbiVector<_Scalar,_Index>::Iterator
{
  public:
    typedef _Scalar Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    /** \param vec the vector on which to iterate
      * \param epsilon coefficients with a magnitude smaller or equal to \a epsilon are skipped
      */
    Iterator(const AmbiVector& vec, RealScalar epsilon = 0)
      : m_vector(vec)
    {
      m_epsilon = epsilon;
      m_isDense = m_vector.m_mode==IsDense;
      if (m_isDense)
      {
        m_currentEl = 0;   // unused in dense mode; set to avoid warnings
        m_cachedValue = 0; // idem
        m_cachedIndex = m_vector.m_start-1;
        ++(*this);         // advance to the first nonzero coefficient
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        m_currentEl = m_vector.m_llStart;
        while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<=m_epsilon)
          m_currentEl = llElements[m_currentEl].next;
        if (m_currentEl<0)
        {
          m_cachedValue = 0;
          m_cachedIndex = -1; // mark the iterator as finished
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
    }

    Index index() const { return m_cachedIndex; }
    Scalar value() const { return m_cachedValue; }

    operator bool() const { return m_cachedIndex>=0; }

    Iterator& operator++()
    {
      if (m_isDense)
      {
        // skip coefficients pruned by epsilon (<= to match the constructor)
        do {
          ++m_cachedIndex;
        } while (m_cachedIndex<m_vector.m_end && internal::abs(m_vector.m_buffer[m_cachedIndex])<=m_epsilon);
        if (m_cachedIndex<m_vector.m_end)
          m_cachedValue = m_vector.m_buffer[m_cachedIndex];
        else
          m_cachedIndex = -1; // past the end: mark the iterator as finished
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        do {
          m_currentEl = llElements[m_currentEl].next;
        } while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<=m_epsilon);
        if (m_currentEl<0)
        {
          m_cachedIndex = -1;
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
      return *this;
    }

  protected:
    const AmbiVector& m_vector;  // the target vector
    Index m_currentEl;           // current element in sparse (linked-list) mode
    RealScalar m_epsilon;        // pruning threshold: |value| <= epsilon is skipped
    Index m_cachedIndex;         // index of the current coefficient
    Scalar m_cachedValue;        // value of the current coefficient
    bool m_isDense;              // whether the underlying vector is in dense mode
};
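
// Usage sketch for the iterator (illustrative only; `process` is a
// hypothetical callback, not part of Eigen):
//
//   for (AmbiVector<double,int>::Iterator it(v, 1e-12); it; ++it)
//     process(it.index(), it.value());   // visits nonzeros in increasing
//                                        // index order, pruning |x| <= 1e-12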

#endif // EIGEN_AMBIVECTOR_H