#ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
#define EIGEN_DYNAMIC_SPARSEMATRIX_H
template<typename _Scalar, int _Options, typename _Index>
struct traits<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = OuterRandomAccessPattern
  };
};
template<typename _Scalar, int _Options, typename _Index>
class DynamicSparseMatrix
  : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    using Base::operator=;
    enum {
      Options = _Options
    };
  protected:
    Index m_innerSize;
    std::vector<internal::CompressedStorage<Scalar,Index> > m_data;

  public:
    inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
    inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
    inline Index innerSize() const { return m_innerSize; }
    inline Index outerSize() const { return static_cast<Index>(m_data.size()); }
    inline Index innerNonZeros(Index j) const { return m_data[j].size(); }
    std::vector<internal::CompressedStorage<Scalar,Index> >& _data() { return m_data; }
    const std::vector<internal::CompressedStorage<Scalar,Index> >& _data() const { return m_data; }
    /** \returns the coefficient value at given position \a row, \a col.
      * This operation involves a log(rho*outer_size) binary search. */
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].at(inner);
    }
    /** \returns a reference to the coefficient at given position \a row, \a col.
      * Performs a binary search; if the coefficient does not exist yet,
      * it is inserted into the sorted sequential buffer. */
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].atWithInsertion(inner);
    }
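    // A minimal usage sketch for the two accessors above (illustrative only;
    // the 3x3 size and the values are assumptions, not taken from this file):
    //
    //   Eigen::DynamicSparseMatrix<double> m(3,3);
    //   m.coeffRef(1,2) = 5.0;     // not stored yet: sorted insertion, then write
    //   double a = m.coeff(1,2);   // binary search, a == 5.0
    //   double b = m.coeff(0,0);   // not stored: returns 0 without inserting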
    class InnerIterator;
    class ReverseInnerIterator;
    void setZero()
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].clear();
    }
    /** \returns the number of nonzero coefficients */
    Index nonZeros() const
    {
      Index res = 0;
      for (Index j=0; j<outerSize(); ++j)
        res += static_cast<Index>(m_data[j].size());
      return res;
    }
    void reserve(Index reserveSize = 1000)
    {
      // Guard against an empty matrix: outerSize() is the divisor below.
      if (outerSize()>0)
      {
        Index reserveSizePerVector = (std::max)(reserveSize/outerSize(), Index(4));
        for (Index j=0; j<outerSize(); ++j)
          m_data[j].reserve(reserveSizePerVector);
      }
    }
    /** Appends a nonzero at (\a row, \a col), assuming it does not exist yet and
      * is the last coefficient of its inner vector. \sa insertBackByOuterInner */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \sa insertBack */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
      eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
                && "wrong sorted insertion");
      m_data[outer].append(0, inner);
      return m_data[outer].value(m_data[outer].size()-1);
    }
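    // Sketch of the insertBack contract (the sizes and values are assumptions):
    // within one inner vector, nonzeros must be appended in strictly increasing
    // inner order, otherwise the "wrong sorted insertion" assertion above fires.
    //
    //   Eigen::DynamicSparseMatrix<double> m(4,4);   // column-major by default
    //   m.insertBackByOuterInner(0,1) = 2.0;         // column 0, row 1
    //   m.insertBackByOuterInner(0,3) = 4.0;         // ok: 3 > 1
    //   //m.insertBackByOuterInner(0,2) = 1.0;       // would assert: 2 < 3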
    /** Inserts a nonzero at (\a row, \a col), which must not exist yet;
      * coefficients may be inserted in any order. */
    inline Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index startId = 0;
      Index id = static_cast<Index>(m_data[outer].size()) - 1;
      m_data[outer].resize(id+2,1);

      // Shift all entries with a larger inner index one slot to the right.
      while ( (id >= startId) && (m_data[outer].index(id) > inner) )
      {
        m_data[outer].index(id+1) = m_data[outer].index(id);
        m_data[outer].value(id+1) = m_data[outer].value(id);
        --id;
      }
      m_data[outer].index(id+1) = inner;
      m_data[outer].value(id+1) = 0;
      return m_data[outer].value(id+1);
    }
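    // Sketch of random-order insertion (coordinates and values are assumptions):
    // unlike insertBack, insert() accepts any order, at the cost of shifting the
    // tail of the affected inner vector by one position.
    //
    //   Eigen::DynamicSparseMatrix<double> m(3,3);
    //   m.insert(2,0) = 3.0;
    //   m.insert(0,0) = 1.0;   // shifts (2,0) one slot to the right in column 0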
    /** Suppresses all nonzeros which are much smaller than \a reference
      * under the tolerance \a epsilon. */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].prune(reference,epsilon);
    }
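    // Illustrative call (the threshold values are assumptions): drop every
    // stored coefficient that is negligible relative to the given reference
    // at the given precision.
    //
    //   m.prune(1.0, 1e-12);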
    /** Resizes the matrix to \a rows x \a cols without preserving the data
      * (the matrix is set to zero). */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      setZero();
      if (Index(m_data.size()) != outerSize)
        m_data.resize(outerSize);
    }
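    // Typical setup sketch (the sizes are assumptions): size the matrix first,
    // then pre-allocate room for the expected number of nonzeros; reserve()
    // spreads the budget uniformly over the inner vectors (at least 4 each).
    //
    //   Eigen::DynamicSparseMatrix<double> m;
    //   m.resize(1000,1000);   // discards any previous content
    //   m.reserve(5000);       // about 5 reserved slots per inner vector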
    void resizeAndKeepData(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      const Index innerSize = IsRowMajor ? cols : rows;
      if (m_innerSize>innerSize)
      {
        // TODO: shrinking the inner size would require removing all
        // coefficients with inner index >= innerSize; not implemented yet.
        exit(2);
      }
      if (Index(m_data.size()) != outerSize)
        m_data.resize(outerSize);
    }
    /** The class DynamicSparseMatrix is deprecated. */
    EIGEN_DEPRECATED inline DynamicSparseMatrix()
      : m_innerSize(0), m_data(0)
    {
      eigen_assert(innerSize()==0 && outerSize()==0);
    }

    /** The class DynamicSparseMatrix is deprecated. */
    EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
      : m_innerSize(0)
    {
      resize(rows, cols);
    }

    /** The class DynamicSparseMatrix is deprecated. */
    template<typename OtherDerived>
    EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_innerSize(0)
    {
      Base::operator=(other.derived());
    }
    inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
      : Base(), m_innerSize(0)
    {
      *this = other.derived();
    }
    inline void swap(DynamicSparseMatrix& other)
    {
      std::swap(m_innerSize, other.m_innerSize);
      m_data.swap(other.m_data);
    }
    inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        m_data = other.m_data;
      }
      return *this;
    }
    /** \deprecated
      * Sets the matrix to zero and reserves the memory for \a reserveSize nonzeros. */
    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      reserve(reserveSize);
    }
    /** \deprecated use insert()
      * Inserts a nonzero at (\a row, \a col) which must not exist yet and must be
      * the coefficient with the largest inner coordinate of its inner vector. */
    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return insertBackByOuterInner(outer,inner);
    }
    /** \deprecated use insert()
      * Like fill(), except that the coefficient can be inserted at an arbitrary
      * inner position; it must still not exist yet. */
    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }
#   ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   endif
};
template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::InnerIterator
  : public SparseVector<Scalar,_Options,_Index>::InnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::InnerIterator Base;
  public:
    InnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};
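// Illustrative traversal sketch (the matrix contents are assumptions):
// InnerIterator enumerates the stored nonzeros of one inner vector, i.e. one
// column in the default column-major layout.
//
//   Eigen::DynamicSparseMatrix<double> m(3,3);
//   m.coeffRef(1,0) = 2.0;
//   for (int j=0; j<m.outerSize(); ++j)
//     for (Eigen::DynamicSparseMatrix<double>::InnerIterator it(m,j); it; ++it)
//       std::cout << "(" << it.row() << "," << it.col() << ") = " << it.value() << "\n";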
template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
  : public SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator Base;
  public:
    ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};
#endif // EIGEN_DYNAMIC_SPARSEMATRIX_H