Eigen 3.4.90 (git rev 5a9f66fb35d03a4da9ef8976e67a61b30aa16dcf)
 
SparseCompressedBase.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

// IWYU pragma: private
#include "./InternalHeaderCheck.h"

namespace Eigen {

template <typename Derived>
class SparseCompressedBase;

namespace internal {

template <typename Derived>
struct traits<SparseCompressedBase<Derived>> : traits<Derived> {};

template <typename Derived, class Comp, bool IsVector>
struct inner_sort_impl;

}  // end namespace internal

/** \class SparseCompressedBase
 * \brief Common base class for sparse [compressed]-{row|column}-storage format.
 */
template <typename Derived>
class SparseCompressedBase : public SparseMatrixBase<Derived> {
 public:
  typedef SparseMatrixBase<Derived> Base;
  EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
  using Base::operator=;
  using Base::IsRowMajor;

  class InnerIterator;
  class ReverseInnerIterator;

 protected:
  typedef typename Base::IndexVector IndexVector;
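  // Maps the buffer of per-inner-vector nonzero counts; the map is empty when
  // *this is compressed (the counts are then implied by outerIndexPtr()).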
  Eigen::Map<IndexVector> innerNonZeros() {
    return Eigen::Map<IndexVector>(innerNonZeroPtr(), isCompressed() ? 0 : derived().outerSize());
  }
  const Eigen::Map<const IndexVector> innerNonZeros() const {
    return Eigen::Map<const IndexVector>(innerNonZeroPtr(), isCompressed() ? 0 : derived().outerSize());
  }

 public:
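  /** \returns the number of non zero coefficients */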
  inline Index nonZeros() const {
    if (Derived::IsVectorAtCompileTime && outerIndexPtr() == 0)
      return derived().nonZeros();
    else if (derived().outerSize() == 0)
      return 0;
    else if (isCompressed())
      return outerIndexPtr()[derived().outerSize()] - outerIndexPtr()[0];
    else
      return innerNonZeros().sum();
  }

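  // Raw access to the underlying storage arrays (values, inner indices, outer
  // index starts, per-vector nonzero counts). These functions are aimed at
  // interoperability with other libraries; all of them forward to the derived
  // class.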
  inline const Scalar* valuePtr() const { return derived().valuePtr(); }
  inline Scalar* valuePtr() { return derived().valuePtr(); }

  inline const StorageIndex* innerIndexPtr() const { return derived().innerIndexPtr(); }
  inline StorageIndex* innerIndexPtr() { return derived().innerIndexPtr(); }

  inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
  inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }

  inline const StorageIndex* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
  inline StorageIndex* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

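  /** \returns whether \c *this is in compressed form. */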
  inline bool isCompressed() const { return innerNonZeroPtr() == 0; }

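  /** \returns a read-only Map view of the stored coefficient values as a 1D array.
   * \warning This is for compressed storage only; it triggers an assertion otherwise.
   * \sa valuePtr(), isCompressed() */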
  const Map<const Array<Scalar, Dynamic, 1>> coeffs() const {
    eigen_assert(isCompressed());
    return Array<Scalar, Dynamic, 1>::Map(valuePtr(), nonZeros());
  }

  // Read-write counterpart of coeffs(); same compressed-storage-only restriction.
  Map<Array<Scalar, Dynamic, 1>> coeffs() {
    eigen_assert(isCompressed());
    return Array<Scalar, Dynamic, 1>::Map(valuePtr(), nonZeros());
  }

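  /** Sorts the inner indices (and the corresponding values) of each inner
   * vector in the range [begin,end) with respect to \c Comp. */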
  template <class Comp = std::less<>>
  inline void sortInnerIndices(Index begin, Index end) {
    eigen_assert(begin >= 0 && end <= derived().outerSize() && end >= begin);
    internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
  }

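  /** \returns the index of the first inner vector in the range [begin,end)
   * that is not sorted with respect to \c Comp, or \c end if all are sorted. */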
  template <class Comp = std::less<>>
  inline Index innerIndicesAreSorted(Index begin, Index end) const {
    eigen_assert(begin >= 0 && end <= derived().outerSize() && end >= begin);
    return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
  }

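  /** Sorts the inner indices of every inner vector. For example, after
   * calling A.sortInnerIndices() on a sparse matrix \c A, the identity
   * A.innerIndicesAreSorted() == A.outerSize() holds. */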
  template <class Comp = std::less<>>
  inline void sortInnerIndices() {
    Index begin = 0;
    Index end = derived().outerSize();
    internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
  }

  template <class Comp = std::less<>>
  inline Index innerIndicesAreSorted() const {
    Index begin = 0;
    Index end = derived().outerSize();
    return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
  }

 protected:
  /** Default constructor. Does nothing. */
  SparseCompressedBase() {}

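  // Binary-searches the inner vector containing (row,col): p.value is the
  // storage offset of the first entry whose inner index is not less than the
  // target, and p.found tells whether the coefficient is actually stored there.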
  internal::LowerBoundIndex lower_bound(Index row, Index col) const {
    eigen_internal_assert(row >= 0 && row < this->rows() && col >= 0 && col < this->cols());

    const Index outer = Derived::IsRowMajor ? row : col;
    const Index inner = Derived::IsRowMajor ? col : row;

    Index start = this->outerIndexPtr()[outer];
    Index end = this->isCompressed() ? this->outerIndexPtr()[outer + 1]
                                     : this->outerIndexPtr()[outer] + this->innerNonZeroPtr()[outer];
    eigen_assert(end >= start && "you are using a non finalized sparse matrix or written coefficient does not exist");
    internal::LowerBoundIndex p;
    p.value =
        std::lower_bound(this->innerIndexPtr() + start, this->innerIndexPtr() + end, inner) - this->innerIndexPtr();
    p.found = (p.value < end) && (this->innerIndexPtr()[p.value] == inner);
    return p;
  }

  friend struct internal::evaluator<SparseCompressedBase<Derived>>;

 private:
  template <typename OtherDerived>
  explicit SparseCompressedBase(const SparseCompressedBase<OtherDerived>&);
};

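// Forward iterator over the stored entries of one inner vector (a row in
// row-major storage, a column otherwise). Typical usage sketch:
//   for (SparseMatrix<double>::InnerIterator it(A, j); it; ++it)
//     sum += it.value();  // it.row(), it.col(), it.index() are also available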
template <typename Derived>
class SparseCompressedBase<Derived>::InnerIterator {
 public:
  InnerIterator() : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0) {}

  InnerIterator(const InnerIterator& other)
      : m_values(other.m_values),
        m_indices(other.m_indices),
        m_outer(other.m_outer),
        m_id(other.m_id),
        m_end(other.m_end) {}

  InnerIterator& operator=(const InnerIterator& other) {
    m_values = other.m_values;
    m_indices = other.m_indices;
    const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
    m_id = other.m_id;
    m_end = other.m_end;
    return *this;
  }

  InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer) {
    if (Derived::IsVectorAtCompileTime && mat.outerIndexPtr() == 0) {
      m_id = 0;
      m_end = mat.nonZeros();
    } else {
      m_id = mat.outerIndexPtr()[outer];
      if (mat.isCompressed())
        m_end = mat.outerIndexPtr()[outer + 1];
      else
        m_end = m_id + mat.innerNonZeroPtr()[outer];
    }
  }

  explicit InnerIterator(const SparseCompressedBase& mat) : InnerIterator(mat, Index(0)) {
    EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
  }

  explicit InnerIterator(const internal::CompressedStorage<Scalar, StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size()) {
    EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
  }

  inline InnerIterator& operator++() {
    m_id++;
    return *this;
  }
  inline InnerIterator& operator+=(Index i) {
    m_id += i;
    return *this;
  }

  inline InnerIterator operator+(Index i) {
    InnerIterator result = *this;
    result += i;
    return result;
  }

  inline const Scalar& value() const { return m_values[m_id]; }
  inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

  inline StorageIndex index() const { return m_indices[m_id]; }
  inline Index outer() const { return m_outer.value(); }
  inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
  inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

  inline operator bool() const { return (m_id < m_end); }

 protected:
  const Scalar* m_values;
  const StorageIndex* m_indices;
  typedef internal::variable_if_dynamic<Index, Derived::IsVectorAtCompileTime ? 0 : Dynamic> OuterType;
  const OuterType m_outer;
  Index m_id;
  Index m_end;

 private:
  // If you get here, then you're not using the right InnerIterator type, e.g.:
  //   SparseMatrix<double,RowMajor> A;
  //   SparseMatrix<double>::InnerIterator it(A,0);
  template <typename T>
  InnerIterator(const SparseMatrixBase<T>&, Index outer);
};

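// Reverse counterpart of InnerIterator: traverses one inner vector from its
// last stored entry back to the first.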
template <typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator {
 public:
  ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer) {
    if (Derived::IsVectorAtCompileTime && mat.outerIndexPtr() == 0) {
      m_start = 0;
      m_id = mat.nonZeros();
    } else {
      m_start = mat.outerIndexPtr()[outer];
      if (mat.isCompressed())
        m_id = mat.outerIndexPtr()[outer + 1];
      else
        m_id = m_start + mat.innerNonZeroPtr()[outer];
    }
  }

  explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros()) {
    EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
  }

  explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar, StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size()) {
    EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
  }

  inline ReverseInnerIterator& operator--() {
    --m_id;
    return *this;
  }
  inline ReverseInnerIterator& operator-=(Index i) {
    m_id -= i;
    return *this;
  }

  inline ReverseInnerIterator operator-(Index i) {
    ReverseInnerIterator result = *this;
    result -= i;
    return result;
  }

  inline const Scalar& value() const { return m_values[m_id - 1]; }
  inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id - 1]); }

  inline StorageIndex index() const { return m_indices[m_id - 1]; }
  inline Index outer() const { return m_outer.value(); }
  inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
  inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

  inline operator bool() const { return (m_id > m_start); }

 protected:
  const Scalar* m_values;
  const StorageIndex* m_indices;
  typedef internal::variable_if_dynamic<Index, Derived::IsVectorAtCompileTime ? 0 : Dynamic> OuterType;
  const OuterType m_outer;
  Index m_start;
  Index m_id;
};

namespace internal {

// modified from https://artificial-mind.net/blog/2020/11/28/std-sort-multiple-ranges

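// The three helpers below let std::sort permute the parallel inner-index and
// value arrays in tandem: CompressedStorageIterator is a random-access
// iterator whose dereference yields a StorageRef proxy (assignable, swappable,
// and ordered by inner index), with StorageVal as its out-of-place value_type.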
template <typename Scalar, typename StorageIndex>
class StorageVal;
template <typename Scalar, typename StorageIndex>
class StorageRef;
template <typename Scalar, typename StorageIndex>
class CompressedStorageIterator;

// class to hold an index/value pair
template <typename Scalar, typename StorageIndex>
class StorageVal {
 public:
  StorageVal(const StorageIndex& innerIndex, const Scalar& value) : m_innerIndex(innerIndex), m_value(value) {}
  StorageVal(const StorageVal& other) : m_innerIndex(other.m_innerIndex), m_value(other.m_value) {}
  StorageVal(StorageVal&& other) = default;

  inline const StorageIndex& key() const { return m_innerIndex; }
  inline StorageIndex& key() { return m_innerIndex; }
  inline const Scalar& value() const { return m_value; }
  inline Scalar& value() { return m_value; }

  // enables StorageVal to be compared with respect to any type that is convertible to StorageIndex
  inline operator StorageIndex() const { return m_innerIndex; }

 protected:
  StorageIndex m_innerIndex;
  Scalar m_value;

 private:
  StorageVal() = delete;
};
// class to hold an index/value iterator pair
// used to define assignment, swap, and comparison operators for CompressedStorageIterator
template <typename Scalar, typename StorageIndex>
class StorageRef {
 public:
  using value_type = StorageVal<Scalar, StorageIndex>;

  // StorageRef needs to be move-able for sort on macOS.
  StorageRef(StorageRef&& other) = default;

  inline StorageRef& operator=(const StorageRef& other) {
    key() = other.key();
    value() = other.value();
    return *this;
  }
  inline StorageRef& operator=(const value_type& other) {
    key() = other.key();
    value() = other.value();
    return *this;
  }
  inline operator value_type() const { return value_type(key(), value()); }
  inline friend void swap(const StorageRef& a, const StorageRef& b) {
    std::iter_swap(a.keyPtr(), b.keyPtr());
    std::iter_swap(a.valuePtr(), b.valuePtr());
  }

  inline const StorageIndex& key() const { return *m_innerIndexIterator; }
  inline StorageIndex& key() { return *m_innerIndexIterator; }
  inline const Scalar& value() const { return *m_valueIterator; }
  inline Scalar& value() { return *m_valueIterator; }
  inline StorageIndex* keyPtr() const { return m_innerIndexIterator; }
  inline Scalar* valuePtr() const { return m_valueIterator; }

  // enables StorageRef to be compared with respect to any type that is convertible to StorageIndex
  inline operator StorageIndex() const { return *m_innerIndexIterator; }

 protected:
  StorageIndex* m_innerIndexIterator;
  Scalar* m_valueIterator;

 private:
  StorageRef() = delete;
  // these constructors are called by the CompressedStorageIterator constructors for convenience only
  StorageRef(StorageIndex* innerIndexIterator, Scalar* valueIterator)
      : m_innerIndexIterator(innerIndexIterator), m_valueIterator(valueIterator) {}
  StorageRef(const StorageRef& other)
      : m_innerIndexIterator(other.m_innerIndexIterator), m_valueIterator(other.m_valueIterator) {}

  friend class CompressedStorageIterator<Scalar, StorageIndex>;
};

// STL-compatible iterator class that operates on inner indices and values
template <typename Scalar, typename StorageIndex>
class CompressedStorageIterator {
 public:
  using iterator_category = std::random_access_iterator_tag;
  using reference = StorageRef<Scalar, StorageIndex>;
  using difference_type = Index;
  using value_type = typename reference::value_type;
  using pointer = value_type*;

  CompressedStorageIterator() = delete;
  CompressedStorageIterator(difference_type index, StorageIndex* innerIndexPtr, Scalar* valuePtr)
      : m_index(index), m_data(innerIndexPtr, valuePtr) {}
  CompressedStorageIterator(difference_type index, reference data) : m_index(index), m_data(data) {}
  CompressedStorageIterator(const CompressedStorageIterator& other) : m_index(other.m_index), m_data(other.m_data) {}
  CompressedStorageIterator(CompressedStorageIterator&& other) = default;
  inline CompressedStorageIterator& operator=(const CompressedStorageIterator& other) {
    m_index = other.m_index;
    m_data = other.m_data;
    return *this;
  }

  inline CompressedStorageIterator operator+(difference_type offset) const {
    return CompressedStorageIterator(m_index + offset, m_data);
  }
  inline CompressedStorageIterator operator-(difference_type offset) const {
    return CompressedStorageIterator(m_index - offset, m_data);
  }
  inline difference_type operator-(const CompressedStorageIterator& other) const { return m_index - other.m_index; }
  inline CompressedStorageIterator& operator++() {
    ++m_index;
    return *this;
  }
  inline CompressedStorageIterator& operator--() {
    --m_index;
    return *this;
  }
  inline CompressedStorageIterator& operator+=(difference_type offset) {
    m_index += offset;
    return *this;
  }
  inline CompressedStorageIterator& operator-=(difference_type offset) {
    m_index -= offset;
    return *this;
  }
  inline reference operator*() const { return reference(m_data.keyPtr() + m_index, m_data.valuePtr() + m_index); }

#define MAKE_COMP(OP) \
  inline bool operator OP(const CompressedStorageIterator& other) const { return m_index OP other.m_index; }
  MAKE_COMP(<)
  MAKE_COMP(>)
  MAKE_COMP(>=)
  MAKE_COMP(<=)
  MAKE_COMP(!=)
  MAKE_COMP(==)
#undef MAKE_COMP

 protected:
  difference_type m_index;
  reference m_data;
};

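// Generic (matrix) case: each inner vector in [begin,end) is sorted/checked
// independently; check() returns the first unsorted outer index, or `end`.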
template <typename Derived, class Comp, bool IsVector>
struct inner_sort_impl {
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::StorageIndex StorageIndex;
  static inline void run(SparseCompressedBase<Derived>& obj, Index begin, Index end) {
    const bool is_compressed = obj.isCompressed();
    for (Index outer = begin; outer < end; outer++) {
      Index begin_offset = obj.outerIndexPtr()[outer];
      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
      CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
      CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
      std::sort(begin_it, end_it, Comp());
    }
  }
  static inline Index check(const SparseCompressedBase<Derived>& obj, Index begin, Index end) {
    const bool is_compressed = obj.isCompressed();
    for (Index outer = begin; outer < end; outer++) {
      Index begin_offset = obj.outerIndexPtr()[outer];
      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
      const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
      const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
      bool is_sorted = std::is_sorted(begin_it, end_it, Comp());
      if (!is_sorted) return outer;
    }
    return end;
  }
};
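// Vector specialization: there is a single inner vector, so the begin/end
// arguments are ignored; check() returns 1 if sorted and 0 otherwise.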
template <typename Derived, class Comp>
struct inner_sort_impl<Derived, Comp, true> {
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::StorageIndex StorageIndex;
  static inline void run(SparseCompressedBase<Derived>& obj, Index, Index) {
    Index begin_offset = 0;
    Index end_offset = obj.nonZeros();
    CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
    CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
    std::sort(begin_it, end_it, Comp());
  }
  static inline Index check(const SparseCompressedBase<Derived>& obj, Index, Index) {
    Index begin_offset = 0;
    Index end_offset = obj.nonZeros();
    const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
    const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
    return std::is_sorted(begin_it, end_it, Comp()) ? 1 : 0;
  }
};

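// Evaluator for expressions deriving from SparseCompressedBase: random access
// to coefficient (row,col) is implemented via a binary search (lower_bound)
// inside the corresponding inner vector.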
template <typename Derived>
struct evaluator<SparseCompressedBase<Derived>> : evaluator_base<Derived> {
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum { CoeffReadCost = NumTraits<Scalar>::ReadCost, Flags = Derived::Flags };

  evaluator() : m_matrix(0), m_zero(0) { EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost); }
  explicit evaluator(const Derived& mat) : m_matrix(&mat), m_zero(0) { EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost); }

  inline Index nonZerosEstimate() const { return m_matrix->nonZeros(); }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived, ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
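  // \returns the stored value at (row,col), or a reference to a persistent
  // zero when the coefficient is not stored.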
  const Scalar& coeff(Index row, Index col) const {
    Index p = find(row, col);

    if (p == Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col) {
    Index p = find(row, col);
    eigen_assert(p != Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

 protected:
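  // \returns the storage offset of coefficient (row,col), or Dynamic if it is
  // not stored.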
  Index find(Index row, Index col) const {
    internal::LowerBoundIndex p = m_matrix->lower_bound(row, col);
    return p.found ? p.value : Dynamic;
  }

  const Derived* m_matrix;
  const Scalar m_zero;
};

} // namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H