#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H
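
/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */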
template<typename _Scalar, typename _Index>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _Index Index;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:
    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}
    CompressedStorage(size_t size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }
    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }
    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      memcpy(m_values, other.m_values, m_size * sizeof(Scalar));
      memcpy(m_indices, other.m_indices, m_size * sizeof(Index));
      return *this;
    }
    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }
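    ~CompressedStorage()
    {
      // The buffers are allocated with new[] in reallocate(), so they must
      // be released with delete[] here.
      delete[] m_values;
      delete[] m_indices;
    }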
    void reserve(size_t size)
    {
      size_t newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }
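    /** Shrinks the allocated buffers to fit exactly the m_size used elements. */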
    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }
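    /** Resizes to \a size elements; when a reallocation is required, an extra
      * reserveSizeFactor*size elements are allocated to amortize future growth. */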
    void resize(size_t size, float reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
        reallocate(size + size_t(reserveSizeFactor*size));
      m_size = size;
    }
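    /** Appends the value \a v at index \a i. Indices are assumed to be
      * appended in increasing order, which the binary-search helpers below
      * rely on. */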
    void append(const Scalar& v, Index i)
    {
      Index id = static_cast<Index>(m_size);
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = i;
    }
    inline size_t size() const { return m_size; }
    inline size_t allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }
    inline Scalar& value(size_t i) { return m_values[i]; }
    inline const Scalar& value(size_t i) const { return m_values[i]; }

    inline Index& index(size_t i) { return m_indices[i]; }
    inline const Index& index(size_t i) const { return m_indices[i]; }
    static CompressedStorage Map(Index* indices, Scalar* values, size_t size)
    {
      CompressedStorage res;
      res.m_indices = indices;
      res.m_values = values;
      res.m_allocatedSize = res.m_size = size;
      return res;
    }
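    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */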
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }
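    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\a key */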
    inline Index searchLowerIndex(size_t start, size_t end, Index key) const
    {
      // Standard binary search for the first index that is not less than key.
      while(end>start)
      {
        size_t mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return static_cast<Index>(start);
    }
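    /** \returns the stored value at index \a key.
      * If the value does not exist, then the value \a defaultValue is
      * returned without any insertion. */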
    inline Scalar at(Index key, Scalar defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      // Shortcut for the last coefficient, a very common access pattern
      // in higher level algorithms.
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      const size_t id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }
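    /** Like at(), but the search is performed in the range [start,end) */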
    inline Scalar atInRange(size_t start, size_t end, Index key, Scalar defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      // Same shortcut as in at(): check the last coefficient first.
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      const size_t id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }
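    /** \returns a reference to the value at index \a key.
      * If the value does not exist, then the value \a defaultValue is
      * inserted such that the keys remain sorted. */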
    inline Scalar& atWithInsertion(Index key, Scalar defaultValue = Scalar(0))
    {
      size_t id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        resize(m_size+1,1);
        // Shift the tail one slot to the right to make room at position id.
        for (size_t j=m_size-1; j>id; --j)
        {
          m_indices[j] = m_indices[j-1];
          m_values[j] = m_values[j-1];
        }
        m_indices[id] = key;
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
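    /** Removes all coefficients which are much smaller than \a reference,
      * up to the precision \a epsilon, and compacts the storage. */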
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      size_t k = 0;
      size_t n = size();
      for (size_t i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }

  protected:

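    /** Grows or shrinks the buffers to \a size elements, preserving the
      * first min(size, m_size) entries. */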
    inline void reallocate(size_t size)
    {
      Scalar* newValues  = new Scalar[size];
      Index*  newIndices = new Index[size];
      size_t copySize = (std::min)(size, m_size);
      // Copy the retained entries, then release the old buffers.
      internal::smart_copy(m_values, m_values+copySize, newValues);
      internal::smart_copy(m_indices, m_indices+copySize, newIndices);
      delete[] m_values;
      delete[] m_indices;
      m_values = newValues;
      m_indices = newIndices;
      m_allocatedSize = size;
    }
    Scalar* m_values;
    Index*  m_indices;
    size_t  m_size;
    size_t  m_allocatedSize;

};
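// A minimal usage sketch (illustrative only, not part of the header). It
// assumes this internal helper is directly instantiable and that indices
// are appended in increasing order, as the binary searches above require:
//
//   CompressedStorage<double,int> s;
//   s.append(1.5, 2);            // value 1.5 at index 2
//   s.append(2.5, 7);            // value 2.5 at index 7
//   double a = s.at(7);          // == 2.5
//   double b = s.at(3, -1.0);    // index 3 absent: returns defaultValue, -1.0
//   s.atWithInsertion(4) = 3.0;  // inserts index 4, keeping indices sorted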
#endif // EIGEN_COMPRESSED_STORAGE_H