10#ifndef EIGEN_SPARSEVECTOR_H
11#define EIGEN_SPARSEVECTOR_H
14#include "./InternalHeaderCheck.h"
// Compile-time traits for SparseVector<Scalar_, Options_, StorageIndex_>:
// exposes the scalar/index types and the static vector shape to Eigen's
// expression machinery.
// NOTE(review): extraction artifact — original source line numbers are fused
// into the text, and the `enum {` opener, the IsColVector and Flags entries,
// and the closing `};` were dropped; restore from the upstream header.
32template <
typename Scalar_,
int Options_,
typename StorageIndex_>
33struct traits<SparseVector<Scalar_, Options_, StorageIndex_> > {
34 typedef Scalar_ Scalar;
35 typedef StorageIndex_ StorageIndex;
36 typedef Sparse StorageKind;
37 typedef MatrixXpr XprKind;
// A column vector has Dynamic rows and exactly one column; a row vector the converse.
41 RowsAtCompileTime = IsColVector ?
Dynamic : 1,
42 ColsAtCompileTime = IsColVector ? 1 :
Dynamic,
43 MaxRowsAtCompileTime = RowsAtCompileTime,
44 MaxColsAtCompileTime = ColsAtCompileTime,
46 SupportedAccessPatterns = InnerRandomAccessPattern
// Strategy tags for assigning an arbitrary sparse expression to a SparseVector:
// SVA_Inner copies along the single inner slice, SVA_Outer probes one entry per
// outer slice, SVA_RuntimeSwitch picks between them at run time (see the
// specializations near the end of the file).
51enum { SVA_RuntimeSwitch, SVA_Inner, SVA_Outer };
// Primary declaration; AssignmentKind is resolved from the source's static shape.
// NOTE(review): the final `: SVA_Inner` default of this conditional (orig. line 56)
// was dropped by the extraction — the ternary chain is incomplete as shown.
53template <
typename Dest,
typename Src,
54 int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
55 : Src::InnerSizeAtCompileTime == 1 ? SVA_Outer
57struct sparse_vector_assign_selector;
// Head of the SparseVector class: storage typedef and option/orientation enums.
// NOTE(review): the `class SparseVector : public SparseCompressedBase<...>` line
// (orig. 62-63) was dropped by the extraction, so this fragment opens no scope.
61template <
typename Scalar_,
int Options_,
typename StorageIndex_>
64 using Base::convert_index;
// Inherit compound assignment from the sparse base class.
68 EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(
SparseVector, +=)
69 EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(
SparseVector, -=)
// Paired (value, index) compressed storage holding the non-zeros.
71 typedef internal::CompressedStorage<Scalar, StorageIndex> Storage;
72 enum { IsColVector = internal::traits<SparseVector>::IsColVector };
74 enum { Options = Options_ };
76 EIGEN_STRONG_INLINE
Index rows()
const {
return IsColVector ? m_size : 1; }
77 EIGEN_STRONG_INLINE
Index cols()
const {
return IsColVector ? 1 : m_size; }
// The inner dimension is the vector's logical size.
78 EIGEN_STRONG_INLINE
Index innerSize()
const {
return m_size; }
// A vector always has exactly one outer slice.
79 EIGEN_STRONG_INLINE
Index outerSize()
const {
return 1; }
// Raw access to the non-zero value array (const and mutable overloads).
81 EIGEN_STRONG_INLINE
const Scalar* valuePtr()
const {
return m_data.valuePtr(); }
82 EIGEN_STRONG_INLINE Scalar* valuePtr() {
return m_data.valuePtr(); }
// Raw access to the inner-index array, parallel to valuePtr().
84 EIGEN_STRONG_INLINE
const StorageIndex* innerIndexPtr()
const {
return m_data.indexPtr(); }
85 EIGEN_STRONG_INLINE
StorageIndex* innerIndexPtr() {
return m_data.indexPtr(); }
// Null: with a single outer slice there is no outer-index array.
87 inline const StorageIndex* outerIndexPtr()
const {
return 0; }
// Null: the storage has no per-outer non-zero counts.
89 inline const StorageIndex* innerNonZeroPtr()
const {
return 0; }
// Direct access to the underlying CompressedStorage object.
93 inline Storage& data() {
return m_data; }
95 inline const Storage& data()
const {
return m_data; }
97 inline Scalar coeff(
Index row,
Index col)
const {
98 eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));
99 return coeff(IsColVector ? row : col);
// Read-only coefficient access by linear index (0 if the index is not stored).
// NOTE(review): the return statement of this overload (orig. line 103) was
// dropped by the extraction — the body below is incomplete.
101 inline Scalar coeff(
Index i)
const {
102 eigen_assert(i >= 0 && i < m_size);
106 inline Scalar& coeffRef(
Index row,
Index col) {
107 eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));
108 return coeffRef(IsColVector ? row : col);
// NOTE(review): orphan fragment — the signature of coeffRef(Index i)
// (orig. line 117) was dropped; this assert belongs to that overload.
118 eigen_assert(i >= 0 && i < m_size);
// Iterator types are inherited from the compressed base class.
124 typedef typename Base::InnerIterator InnerIterator;
125 typedef typename Base::ReverseInnerIterator ReverseInnerIterator;
// Drops all stored non-zeros; the logical size m_size is unchanged.
127 inline void setZero() { m_data.clear(); }
132 inline void startVec(
Index outer) {
133 EIGEN_UNUSED_VARIABLE(outer);
134 eigen_assert(outer == 0);
137 inline Scalar& insertBackByOuterInner(Index outer, Index inner) {
138 EIGEN_UNUSED_VARIABLE(outer);
139 eigen_assert(outer == 0);
140 return insertBack(inner);
// Appends a zero-initialized coefficient at index i and returns a reference to it.
// NOTE(review): the appending statement (orig. line 143) was dropped by the
// extraction; only the trailing return survives below.
142 inline Scalar& insertBack(Index i) {
144 return m_data.value(m_data.size() - 1);
// Unordered variant routed through the (outer, inner) low-level API; outer must be 0.
147 Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner) {
148 EIGEN_UNUSED_VARIABLE(outer);
149 eigen_assert(outer == 0);
150 return insertBackUnordered(inner);
// NOTE(review): likewise, the appending statement of this overload (orig. line 153)
// was dropped; the body is incomplete.
152 inline Scalar& insertBackUnordered(Index i) {
154 return m_data.value(m_data.size() - 1);
157 inline Scalar& insert(Index row, Index col) {
158 eigen_assert(IsColVector ? (col == 0 && row >= 0 && row < m_size) : (row == 0 && col >= 0 && col < m_size));
160 Index inner = IsColVector ? row : col;
161 Index outer = IsColVector ? col : row;
162 EIGEN_ONLY_USED_FOR_DEBUG(outer);
163 eigen_assert(outer == 0);
164 return insert(inner);
// Sorted insertion: grows the storage by one and shifts entries with a larger
// inner index one slot to the right, then writes index i with value 0.
// NOTE(review): the declaration of `startId` and the loop's `--p; }` (orig.
// lines ~169 and 177-178) were dropped by the extraction — restore before use.
166 Scalar& insert(Index i) {
167 eigen_assert(i >= 0 && i < m_size);
170 Index p = Index(m_data.size()) - 1;
// Make room for exactly one more (value, index) pair.
172 m_data.resize(p + 2, 1);
174 while ((p >= startId) && (m_data.index(p) > i)) {
175 m_data.index(p + 1) = m_data.index(p);
176 m_data.value(p + 1) = m_data.value(p);
179 m_data.index(p + 1) = convert_index(i);
180 m_data.value(p + 1) = 0;
181 return m_data.value(p + 1);
// Pre-allocates room for reserveSize non-zeros.
186 inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }
// No-op: vector storage needs no finalization pass.
188 inline void finalize() {}
// prune(reference, epsilon): keeps entries not much smaller than `reference`,
// implemented via the predicate overload below.
// NOTE(review): the signature line of this overload (orig. line 191) was
// dropped by the extraction.
192 return prune([&](
const Scalar& val) {
return !internal::isMuchSmallerThan(val, reference, epsilon); });
// prune(keep_predicate): compacts kept entries toward the front in place.
// NOTE(review): the enclosing signature, the `++k`, and the trailing
// resize/return (orig. lines 203-204, 210-213) were dropped; `k` is the
// write cursor of the compaction.
205 Index n = m_data.size();
206 for (
Index i = 0; i < n; ++i) {
207 if (keep_predicate(m_data.value(i))) {
208 m_data.value(k) = std::move(m_data.value(i));
209 m_data.index(k) = m_data.index(i);
// resize(rows, cols): the outer dimension must be 1; forwards to resize(size).
// NOTE(review): the signature line (orig. line 225) was dropped.
226 eigen_assert((IsColVector ? cols : rows) == 1 &&
"Outer dimension must equal 1");
227 resize(IsColVector ? rows : cols);
// conservativeResize fragment: when shrinking, scan for the first stored
// index that falls outside the new size.
// NOTE(review): signature and surrounding statements (orig. 246, 248, 250-253)
// were dropped by the extraction.
247 if (newSize < m_size) {
249 while (i < m_data.size() && m_data.index(i) < newSize) ++i;
// Resizes only the non-zero storage, not the logical vector size.
255 void resizeNonZeros(
Index size) { m_data.resize(size); }
// Default, sized, and (rows, cols) constructors; all delegate to resize().
257 inline SparseVector() : m_size(0) { resize(0); }
259 explicit inline SparseVector(Index size) : m_size(0) { resize(size); }
261 inline SparseVector(Index rows, Index cols) : m_size(0) { resize(rows, cols); }
// Conversion constructor from any sparse expression, via operator=.
263 template <
typename OtherDerived>
264 inline SparseVector(
const SparseMatrixBase<OtherDerived>& other) : m_size(0) {
265#ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
266 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
// NOTE(review): the matching #endif (orig. line 267) was dropped by the extraction.
268 *
this = other.derived();
// Copy constructor, also routed through operator=.
271 inline SparseVector(
const SparseVector& other) : Base(other), m_size(0) { *
this = other.derived(); }
// swap(SparseVector&) body fragment: exchanges size and storage in O(1).
// NOTE(review): the enclosing signature (orig. line 277) was dropped.
278 std::swap(m_size, other.m_size);
279 m_data.swap(other.m_data);
// swap with a SparseMatrix of any storage order: exchanges the inner size and
// the compressed storage in O(1).
// NOTE(review): the function signature (orig. lines 283-284) was dropped.
282 template <
int OtherOptions>
285 std::swap(m_size, other.m_innerSize);
286 m_data.swap(other.m_data);
// Copy assignment: steal the rvalue's storage via swap when possible,
// otherwise do a genuine deep copy.
289 inline SparseVector& operator=(
const SparseVector& other) {
290 if (other.isRValue()) {
291 swap(other.const_cast_derived());
// NOTE(review): the `} else {` between the two branches and the trailing
// `return *this;` (orig. lines 292, 295-297) were dropped by the extraction.
293 resize(other.size());
294 m_data = other.m_data;
// Generic assignment from any sparse expression: build into a temporary with
// the statically selected strategy, then (in the dropped tail) swap it in.
299 template <
typename OtherDerived>
300 inline SparseVector& operator=(
const SparseMatrixBase<OtherDerived>& other) {
301 SparseVector tmp(other.size());
302 internal::sparse_vector_assign_selector<SparseVector, OtherDerived>::run(tmp, other.derived());
// Move constructor: default-construct then O(1) swap with the source.
307 inline SparseVector(SparseVector&& other) : SparseVector() { this->swap(other); }
// Move construction from any compressed sparse expression: mark the source as
// an rvalue so the assignment above can steal its storage.
309 template <
typename OtherDerived>
310 inline SparseVector(SparseCompressedBase<OtherDerived>&& other) : SparseVector() {
311 *
this = other.derived().markAsRValue();
// Move assignment.
// NOTE(review): this operator's body (orig. lines 315-317) was dropped by the
// extraction; only the signature survives.
314 inline SparseVector& operator=(SparseVector&& other) {
319 template <
typename OtherDerived>
320 inline SparseVector& operator=(SparseCompressedBase<OtherDerived>&& other) {
321 *
this = other.derived().markAsRValue();
// Assignment from a sparse-sparse product, delegated to the base class.
325#ifndef EIGEN_PARSED_BY_DOXYGEN
326 template <
typename Lhs,
typename Rhs>
327 inline SparseVector& operator=(
const SparseSparseProduct<Lhs, Rhs>& product) {
328 return Base::operator=(product);
// Prints the vector as a sequence of "(value,index) " pairs.
// NOTE(review): the trailing statements returning `s` (orig. lines 335-337)
// were dropped by the extraction.
333 friend std::ostream& operator<<(std::ostream& s,
const SparseVector& m) {
334 for (Index i = 0; i < m.nonZeros(); ++i) s <<
"(" << m.m_data.value(i) <<
"," << m.m_data.index(i) <<
") ";
// Deprecated legacy fill API, kept for backward compatibility; each call
// forwards to the modern equivalents (setZero/reserve/insertBack/insert).
// NOTE(review): several bodies below are missing statements and closing braces
// dropped by the extraction (e.g. fill(Index i)'s append, orig. line 361).
348 EIGEN_DEPRECATED
void startFill(
Index reserve) {
350 m_data.reserve(reserve);
// fill(r, c): one of the two coordinates must be 0; routes to fill(i).
354 EIGEN_DEPRECATED Scalar& fill(Index r, Index c) {
355 eigen_assert(r == 0 || c == 0);
356 return fill(IsColVector ? r : c);
360 EIGEN_DEPRECATED Scalar& fill(Index i) {
362 return m_data.value(m_data.size() - 1);
// fillrand(r, c): random-order insertion; routes to fillrand(i) -> insert(i).
366 EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c) {
367 eigen_assert(r == 0 || c == 0);
368 return fillrand(IsColVector ? r : c);
372 EIGEN_DEPRECATED Scalar& fillrand(Index i) {
return insert(i); }
// endFill is a no-op kept for API symmetry with startFill.
375 EIGEN_DEPRECATED
void endFill() {}
// Deprecated direct accessors to the raw storage.
379 EIGEN_DEPRECATED Storage& _data() {
return m_data; }
381 EIGEN_DEPRECATED
const Storage& _data()
const {
return m_data; }
// User-extension hook.
383#ifdef EIGEN_SPARSEVECTOR_PLUGIN
384#include EIGEN_SPARSEVECTOR_PLUGIN
// Compile-time sanity checks on the template parameters.
388 EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned, THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE)
389 EIGEN_STATIC_ASSERT((Options_ & (ColMajor | RowMajor)) == Options, INVALID_MATRIX_TEMPLATE_PARAMETERS)
397template <
typename Scalar_,
int Options_,
typename Index_>
398struct evaluator<SparseVector<Scalar_, Options_, Index_> > : evaluator_base<SparseVector<Scalar_, Options_, Index_> > {
399 typedef SparseVector<Scalar_, Options_, Index_> SparseVectorType;
400 typedef evaluator_base<SparseVectorType> Base;
401 typedef typename SparseVectorType::InnerIterator InnerIterator;
402 typedef typename SparseVectorType::ReverseInnerIterator ReverseInnerIterator;
404 enum { CoeffReadCost = NumTraits<Scalar_>::ReadCost, Flags = SparseVectorType::Flags };
406 evaluator() : Base() {}
408 explicit evaluator(
const SparseVectorType& mat) : m_matrix(&mat) { EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost); }
410 inline Index nonZerosEstimate()
const {
return m_matrix->nonZeros(); }
412 operator SparseVectorType&() {
return m_matrix->const_cast_derived(); }
413 operator const SparseVectorType&()
const {
return *m_matrix; }
415 const SparseVectorType* m_matrix;
418template <
typename Dest,
typename Src>
419struct sparse_vector_assign_selector<Dest, Src, SVA_Inner> {
420 static void run(Dest& dst,
const Src& src) {
421 eigen_internal_assert(src.innerSize() == src.size());
422 typedef internal::evaluator<Src> SrcEvaluatorType;
423 SrcEvaluatorType srcEval(src);
424 for (
typename SrcEvaluatorType::InnerIterator it(srcEval, 0); it; ++it) dst.insert(it.index()) = it.value();
428template <
typename Dest,
typename Src>
429struct sparse_vector_assign_selector<Dest, Src, SVA_Outer> {
430 static void run(Dest& dst,
const Src& src) {
431 eigen_internal_assert(src.outerSize() == src.size());
432 typedef internal::evaluator<Src> SrcEvaluatorType;
433 SrcEvaluatorType srcEval(src);
434 for (Index i = 0; i < src.size(); ++i) {
435 typename SrcEvaluatorType::InnerIterator it(srcEval, i);
436 if (it) dst.insert(i) = it.value();
441template <
typename Dest,
typename Src>
442struct sparse_vector_assign_selector<Dest, Src, SVA_RuntimeSwitch> {
443 static void run(Dest& dst,
const Src& src) {
444 if (src.outerSize() == 1)
445 sparse_vector_assign_selector<Dest, Src, SVA_Inner>::run(dst, src);
447 sparse_vector_assign_selector<Dest, Src, SVA_Outer>::run(dst, src);
// Binary (de)serialization of a SparseVector: a fixed Header (logical size and
// non-zero count) followed by the inner-index array and then the value array.
// NOTE(review): the Header struct is only partially visible (its opening and
// the num_non_zeros member, orig. lines 459-463, were dropped), as were many
// closing braces and pointer advances throughout this class.
455template <
typename Scalar,
int Options,
typename StorageIndex>
456class Serializer<SparseVector<Scalar, Options, StorageIndex>, void> {
458 typedef SparseVector<Scalar, Options, StorageIndex> SparseMat;
// Header member: the vector's logical size.
461 typename SparseMat::Index size;
// Total byte footprint: header plus one (Scalar, StorageIndex) pair per non-zero.
465 EIGEN_DEVICE_FUNC
size_t size(
const SparseMat& value)
const {
466 return sizeof(Header) + (
sizeof(Scalar) +
sizeof(StorageIndex)) * value.nonZeros();
// Writes header, indices, then values into [dest, end); returns the advanced
// cursor, or nullptr when dest is null or the buffer is too small.
469 EIGEN_DEVICE_FUNC uint8_t*
serialize(uint8_t* dest, uint8_t* end,
const SparseMat& value) {
470 if (EIGEN_PREDICT_FALSE(dest ==
nullptr))
return nullptr;
471 if (EIGEN_PREDICT_FALSE(dest + size(value) > end))
return nullptr;
473 const size_t header_bytes =
sizeof(Header);
474 Header header = {value.innerSize(), value.nonZeros()};
475 EIGEN_USING_STD(memcpy)
476 memcpy(dest, &header, header_bytes);
477 dest += header_bytes;
// Inner indices first...
480 std::size_t data_bytes =
sizeof(StorageIndex) * header.num_non_zeros;
481 memcpy(dest, value.innerIndexPtr(), data_bytes);
// ...then the values.
485 data_bytes =
sizeof(Scalar) * header.num_non_zeros;
486 memcpy(dest, value.valuePtr(), data_bytes);
// Reads the same layout back from [src, end); every read is bounds-checked
// and nullptr is returned on truncated input.
492 EIGEN_DEVICE_FUNC
const uint8_t*
deserialize(
const uint8_t* src,
const uint8_t* end, SparseMat& value)
const {
493 if (EIGEN_PREDICT_FALSE(src ==
nullptr))
return nullptr;
494 if (EIGEN_PREDICT_FALSE(src +
sizeof(Header) > end))
return nullptr;
496 const size_t header_bytes =
sizeof(Header);
498 EIGEN_USING_STD(memcpy)
499 memcpy(&header, src, header_bytes);
// Size the destination before copying the payload in.
503 value.resize(header.size);
504 value.resizeNonZeros(header.num_non_zeros);
507 std::size_t data_bytes =
sizeof(StorageIndex) * header.num_non_zeros;
508 if (EIGEN_PREDICT_FALSE(src + data_bytes > end))
return nullptr;
509 memcpy(value.innerIndexPtr(), src, data_bytes);
513 data_bytes =
sizeof(Scalar) * header.num_non_zeros;
514 if (EIGEN_PREDICT_FALSE(src + data_bytes > end))
return nullptr;
515 memcpy(value.valuePtr(), src, data_bytes);
Common base class for sparse [compressed]-{row|column}-storage format.
Definition SparseCompressedBase.h:43
internal::traits< SparseVector< Scalar_, Options_, StorageIndex_ > >::StorageIndex StorageIndex
Definition SparseMatrixBase.h:44
A versatile sparse matrix representation.
Definition SparseUtil.h:47
Index outerSize() const
Definition SparseMatrix.h:166
a sparse vector class
Definition SparseVector.h:62
void conservativeResize(Index newSize)
Definition SparseVector.h:246
Scalar & coeffRef(Index i)
Definition SparseVector.h:117
Index prune(const Scalar &reference, const RealScalar &epsilon=NumTraits< RealScalar >::dummy_precision())
Definition SparseVector.h:191
void resize(Index rows, Index cols)
Definition SparseVector.h:225
~SparseVector()
Definition SparseVector.h:341
void swap(SparseVector &other)
Definition SparseVector.h:277
Index prune(F &&keep_predicate)
Prunes the entries of the vector based on a predicate.
Definition SparseVector.h:203
Index nonZeros() const
Definition SparseVector.h:130
void resize(Index newSize)
Definition SparseVector.h:234
const unsigned int LvalueBit
Definition Constants.h:148
const unsigned int RowMajorBit
Definition Constants.h:70
const unsigned int CompressedAccessBit
Definition Constants.h:195
Namespace containing all symbols from the Eigen library.
Definition Core:137
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index
The Index type as used for the API.
Definition Meta.h:83
uint8_t * serialize(uint8_t *dest, uint8_t *end, const Args &... args)
Definition Serializer.h:188
const int Dynamic
Definition Constants.h:25
const uint8_t * deserialize(const uint8_t *src, const uint8_t *end, Args &... args)
Definition Serializer.h:201
Holds information about the various numeric (i.e. scalar) types allowed by Eigen.
Definition Meta.h:523