#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H

namespace Eigen {

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseVector<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (_Options & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
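// Shape sketch: with the default _Options (ColMajor), IsColVector is 1 and the
// vector is a rows()==size, cols()==1 expression; passing RowMajor flips it.
//
//   Eigen::SparseVector<double> v(5);                   // 5 x 1 column vector
//   Eigen::SparseVector<double, Eigen::RowMajor> w(5);  // 1 x 5 row vector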
// Sparse-Vector-Assignment kinds:
enum {
  SVA_RuntimeSwitch,
  SVA_Inner,
  SVA_Outer
};

template< typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                             : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
                             : SVA_Inner>
struct sparse_vector_assign_selector;

} // end namespace internal
/** \class SparseVector
  *
  * \brief a sparse vector class
  *
  * \tparam _Scalar the scalar type, i.e. the type of the coefficients
  */
template<typename _Scalar, int _Options, typename _Index>
class SparseVector
  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
{
    typedef SparseMatrixBase<SparseVector> SparseBase;

  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

    typedef internal::CompressedStorage<Scalar,Index> Storage;
    enum { IsColVector = internal::traits<SparseVector>::IsColVector };

    enum {
      Options = _Options
    };
    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
    EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }

    EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
    EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
    inline Storage& data() { return m_data; }
    inline const Storage& data() const { return m_data; }
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeff(IsColVector ? row : col);
    }

    inline Scalar coeff(Index i) const
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.at(i);
    }
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeffRef(IsColVector ? row : col);
    }

    /** \returns a reference to the coefficient value at given index \a i.
      * This performs a binary search; if the coefficient does not exist yet,
      * a sorted insertion into the compressed storage is done, which can be costly.
      */
    inline Scalar& coeffRef(Index i)
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.atWithInsertion(i);
    }
    class InnerIterator;
    class ReverseInnerIterator;

    inline void setZero() { m_data.clear(); }
    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }
    inline void startVec(Index outer)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBack(inner);
    }
    inline Scalar& insertBack(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }
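    // Sequential filling sketch (assuming indices are appended in strictly
    // increasing order, which is what insertBack expects; 'estimated_nnz' is a
    // hypothetical placeholder):
    //
    //   Eigen::SparseVector<double> v(n);
    //   v.setZero();
    //   v.reserve(estimated_nnz);
    //   v.insertBack(i) = x;     // for each index i, in increasing order
    //   v.finalize();            // no-op here, kept for SparseMatrix symmetry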
    inline Scalar& insert(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));

      Index inner = IsColVector ? row : col;
      Index outer = IsColVector ? col : row;
      eigen_assert(outer==0);
      return insert(inner);
    }
    Scalar& insert(Index i)
    {
      eigen_assert(i>=0 && i<m_size);

      Index startId = 0;
      Index p = Index(m_data.size()) - 1;
      // TODO smart realloc
      m_data.resize(p+2,1);

      // shift every stored entry with an index greater than i one slot to the right
      while ( (p >= startId) && (m_data.index(p) > i) )
      {
        m_data.index(p+1) = m_data.index(p);
        m_data.value(p+1) = m_data.value(p);
        --p;
      }
      m_data.index(p+1) = i;
      m_data.value(p+1) = 0;
      return m_data.value(p+1);
    }
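    // Cost sketch: insert() keeps the index array sorted, so inserting below
    // the current largest index pays for the shifting loop above. Insertions
    // in arbitrary order work, but cost O(nnz) each:
    //
    //   Eigen::SparseVector<double> v(5);
    //   v.insert(3) = 1.0;   // appended at the end, no shifting
    //   v.insert(1) = 2.0;   // the entry for index 3 is shifted one slot right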
    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

    inline void finalize() {}
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      m_data.prune(reference,epsilon);
    }
    void resize(Index rows, Index cols)
    {
      eigen_assert(rows==1 || cols==1);
      resize(IsColVector ? rows : cols);
    }

    void resize(Index newSize)
    {
      m_size = newSize;
      m_data.clear();
    }

    void resizeNonZeros(Index size) { m_data.resize(size); }
    inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }

    inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }

    inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }
    template<typename OtherDerived>
    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
      : m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    inline SparseVector(const SparseVector& other)
      : SparseBase(other), m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** Swaps the values of \c *this and \a other.
      * Overloaded for performance: this version performs a \em shallow swap of the storage.
      */
    inline void swap(SparseVector& other)
    {
      std::swap(m_size, other.m_size);
      m_data.swap(other.m_data);
    }

    inline SparseVector& operator=(const SparseVector& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.size());
        m_data = other.m_data;
      }
      return *this;
    }
    template<typename OtherDerived>
    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      SparseVector tmp(other.size());
      internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
      this->swap(tmp);
      return *this;
    }
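    // Assigning a sparse expression evaluates into a temporary and then does a
    // shallow swap, so the assignment stays correct when 'other' aliases *this.
    // Typical use (a sketch; 'mat' is an assumed SparseMatrix<double>):
    //
    //   Eigen::SparseVector<double> v = mat.col(j);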
    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }
    #endif
    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
    {
      for (Index i=0; i<m.nonZeros(); ++i)
        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      s << std::endl;
      return s;
    }

    /** Destructor */
    inline ~SparseVector() {}

    /** Overloaded for performance */
    Scalar sum() const;
    /** \internal \deprecated use setZero() and reserve() */
    EIGEN_DEPRECATED void startFill(Index reserve)
    {
      setZero();
      m_data.reserve(reserve);
    }

    /** \internal \deprecated use insertBack(Index,Index) */
    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fill(IsColVector ? r : c);
    }

    /** \internal \deprecated use insertBack(Index) */
    EIGEN_DEPRECATED Scalar& fill(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    /** \internal \deprecated use insert(Index,Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fillrand(IsColVector ? r : c);
    }

    /** \internal \deprecated use insert(Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index i)
    {
      return insert(i);
    }

    /** \internal \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() {}
    /** \internal \deprecated use data() */
    EIGEN_DEPRECATED Storage& _data() { return m_data; }
    /** \internal \deprecated use data() */
    EIGEN_DEPRECATED const Storage& _data() const { return m_data; }
#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif
  protected:

    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }

    Storage m_data;
    Index m_size;
};
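// Minimal end-to-end usage sketch (assuming the API declared above; the values
// in the comments follow from the calls shown):
//
//   #include <Eigen/SparseCore>
//   #include <iostream>
//
//   int main()
//   {
//     Eigen::SparseVector<double> v(10);  // logical size 10, no stored entries
//     v.reserve(2);
//     v.insert(2) = 1.0;
//     v.insert(5) = -2.5;
//     v.coeffRef(5) += 0.5;               // binary search; the entry exists
//     std::cout << v.nonZeros() << "\n";  // 2
//     std::cout << v << "\n";             // (1,2) (-2,5)
//   }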
template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {}

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline Scalar value() const { return m_data.value(m_id); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }

    inline Index index() const { return m_data.index(m_id); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_end;
};
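// Forward traversal sketch over the stored coefficients of a vector 'v':
//
//   for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
//     std::cout << it.index() << " -> " << it.value() << "\n";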
template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {}

    inline ReverseInnerIterator& operator--() { m_id--; return *this; }

    inline Scalar value() const { return m_data.value(m_id-1); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }

    inline Index index() const { return m_data.index(m_id-1); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_start;
};
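// Reverse traversal sketch (largest stored index first), mirroring the forward
// loop above:
//
//   for (Eigen::SparseVector<double>::ReverseInnerIterator it(v); it; --it)
//     std::cout << it.index() << " -> " << it.value() << "\n";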
namespace internal {

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize()==src.size());
    for(typename Src::InnerIterator it(src, 0); it; ++it)
      dst.insert(it.index()) = it.value();
  }
};
template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize()==src.size());
    for(typename Dest::Index i=0; i<src.size(); ++i)
    {
      typename Src::InnerIterator it(src, i);
      if(it)
        dst.insert(i) = it.value();
    }
  }
};
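// The runtime-switch case below handles sources whose vector-ness is unknown
// at compile time: a source with a single outer slot is copied through its one
// inner vector (SVA_Inner); otherwise each outer slot contributes at most one
// coefficient (SVA_Outer).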
template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if(src.outerSize()==1)  sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
    else                    sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
  }
};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSEVECTOR_H