Eigen 3.4.90 (git rev 5a9f66fb35d03a4da9ef8976e67a61b30aa16dcf)
 
SparseBlock.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_BLOCK_H
#define EIGEN_SPARSE_BLOCK_H

// IWYU pragma: private
#include "./InternalHeaderCheck.h"

namespace Eigen {

// Subset of columns or rows
template <typename XprType, int BlockRows, int BlockCols>
class BlockImpl<XprType, BlockRows, BlockCols, true, Sparse>
    : public SparseMatrixBase<Block<XprType, BlockRows, BlockCols, true> > {
  typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;
  typedef Block<XprType, BlockRows, BlockCols, true> BlockType;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };

 protected:
  enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
  typedef SparseMatrixBase<BlockType> Base;
  using Base::convert_index;

 public:
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)

  inline BlockImpl(XprType& xpr, Index i) : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize) {}

  inline BlockImpl(XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr),
        m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols)) {}

  EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

  Index nonZeros() const {
    typedef internal::evaluator<XprType> EvaluatorType;
    EvaluatorType matEval(m_matrix);
    Index nnz = 0;
    Index end = m_outerStart + m_outerSize.value();
    for (Index j = m_outerStart; j < end; ++j)
      for (typename EvaluatorType::InnerIterator it(matEval, j); it; ++it) ++nnz;
    return nnz;
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
  }

  inline const XprType& nestedExpression() const { return m_matrix; }
  inline XprType& nestedExpression() { return m_matrix; }
  Index startRow() const { return IsRowMajor ? m_outerStart : 0; }
  Index startCol() const { return IsRowMajor ? 0 : m_outerStart; }
  Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

 protected:
  typename internal::ref_selector<XprType>::non_const_type m_matrix;
  Index m_outerStart;
  const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;

 protected:
  // Disable assignment with a clear error message.
  // Note that simply removing operator= yields compilation errors with ICC+MSVC.
  template <typename T>
  BlockImpl& operator=(const T&) {
    EIGEN_STATIC_ASSERT(sizeof(T) == 0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
    return *this;
  }
};
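// Usage sketch (illustrative, assuming only the public SparseCore API): this
// implementation backs inner-panel blocks of *generic* sparse expressions,
// which are read-only; their entries are reached through the expression's
// evaluator, exactly as nonZeros() above does.
//
//   #include <Eigen/SparseCore>
//   Eigen::SparseMatrix<double> A(4, 4);      // column-major by default
//   A.insert(1, 2) = 3.0;
//   A.insert(3, 2) = 7.0;
//   A.makeCompressed();
//   const auto expr = A * 2.0;                // generic sparse expression
//   Eigen::Index n = expr.col(2).nonZeros();  // == 2, counted via iterators
//   // Assigning to expr.col(2) would hit the operator= below and trigger
//   // THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY.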

/***************************************************************************
 * specialization for SparseMatrix
 ***************************************************************************/

namespace internal {

template <typename SparseMatrixType, int BlockRows, int BlockCols>
class sparse_matrix_block_impl : public SparseCompressedBase<Block<SparseMatrixType, BlockRows, BlockCols, true> > {
  typedef internal::remove_all_t<typename SparseMatrixType::Nested> MatrixTypeNested_;
  typedef Block<SparseMatrixType, BlockRows, BlockCols, true> BlockType;
  typedef SparseCompressedBase<Block<SparseMatrixType, BlockRows, BlockCols, true> > Base;
  using Base::convert_index;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
 protected:
  typedef typename Base::IndexVector IndexVector;
  enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };

 public:
  inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index i)
      : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize) {}

  inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows,
                                  Index blockCols)
      : m_matrix(xpr),
        m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols)) {}

  template <typename OtherDerived>
  inline BlockType& operator=(const SparseMatrixBase<OtherDerived>& other) {
    typedef internal::remove_all_t<typename SparseMatrixType::Nested> NestedMatrixType_;
    NestedMatrixType_& matrix = m_matrix;
    // This assignment is slow if this vector set is not empty
    // and/or it is not at the end of the nonzeros of the underlying matrix.

    // 1 - eval to a temporary to avoid transposition and/or aliasing issues
    Ref<const SparseMatrix<Scalar, IsRowMajor ? RowMajor : ColMajor, StorageIndex> > tmp(other.derived());
    eigen_internal_assert(tmp.outerSize() == m_outerSize.value());

    // 2 - let's check whether there is enough allocated memory
    Index nnz = tmp.nonZeros();
    Index start =
        m_outerStart == 0 ? 0 : m_matrix.outerIndexPtr()[m_outerStart];  // starting position of the current block
    Index end = m_matrix.outerIndexPtr()[m_outerStart + m_outerSize.value()];  // ending position of the current block
    Index block_size = end - start;                                            // available room in the current block
    Index tail_size = m_matrix.outerIndexPtr()[m_matrix.outerSize()] - end;

    Index free_size = m_matrix.isCompressed() ? Index(matrix.data().allocatedSize()) + block_size : block_size;

    Index tmp_start = tmp.outerIndexPtr()[0];

    bool update_trailing_pointers = false;
    if (nnz > free_size) {
      // realloc manually to reduce copies
      typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);

      internal::smart_copy(m_matrix.valuePtr(), m_matrix.valuePtr() + start, newdata.valuePtr());
      internal::smart_copy(m_matrix.innerIndexPtr(), m_matrix.innerIndexPtr() + start, newdata.indexPtr());

      internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, newdata.valuePtr() + start);
      internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz,
                           newdata.indexPtr() + start);

      internal::smart_copy(matrix.valuePtr() + end, matrix.valuePtr() + end + tail_size,
                           newdata.valuePtr() + start + nnz);
      internal::smart_copy(matrix.innerIndexPtr() + end, matrix.innerIndexPtr() + end + tail_size,
                           newdata.indexPtr() + start + nnz);

      newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);

      matrix.data().swap(newdata);

      update_trailing_pointers = true;
    } else {
      if (m_matrix.isCompressed() && nnz != block_size) {
        // no need to realloc, simply copy the tail at its respective position and insert tmp
        matrix.data().resize(start + nnz + tail_size);

        internal::smart_memmove(matrix.valuePtr() + end, matrix.valuePtr() + end + tail_size,
                                matrix.valuePtr() + start + nnz);
        internal::smart_memmove(matrix.innerIndexPtr() + end, matrix.innerIndexPtr() + end + tail_size,
                                matrix.innerIndexPtr() + start + nnz);

        update_trailing_pointers = true;
      }

      internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, matrix.valuePtr() + start);
      internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz,
                           matrix.innerIndexPtr() + start);
    }

    // update outer index pointers and innerNonZeros
    if (IsVectorAtCompileTime) {
      if (!m_matrix.isCompressed()) matrix.innerNonZeroPtr()[m_outerStart] = StorageIndex(nnz);
      matrix.outerIndexPtr()[m_outerStart] = StorageIndex(start);
    } else {
      StorageIndex p = StorageIndex(start);
      for (Index k = 0; k < m_outerSize.value(); ++k) {
        StorageIndex nnz_k = internal::convert_index<StorageIndex>(tmp.innerVector(k).nonZeros());
        if (!m_matrix.isCompressed()) matrix.innerNonZeroPtr()[m_outerStart + k] = nnz_k;
        matrix.outerIndexPtr()[m_outerStart + k] = p;
        p += nnz_k;
      }
    }

    if (update_trailing_pointers) {
      StorageIndex offset = internal::convert_index<StorageIndex>(nnz - block_size);
      for (Index k = m_outerStart + m_outerSize.value(); k <= matrix.outerSize(); ++k) {
        matrix.outerIndexPtr()[k] += offset;
      }
    }

    return derived();
  }

  inline BlockType& operator=(const BlockType& other) { return operator= <BlockType>(other); }

  inline const Scalar* valuePtr() const { return m_matrix.valuePtr(); }
  inline Scalar* valuePtr() { return m_matrix.valuePtr(); }

  inline const StorageIndex* innerIndexPtr() const { return m_matrix.innerIndexPtr(); }
  inline StorageIndex* innerIndexPtr() { return m_matrix.innerIndexPtr(); }

  inline const StorageIndex* outerIndexPtr() const { return m_matrix.outerIndexPtr() + m_outerStart; }
  inline StorageIndex* outerIndexPtr() { return m_matrix.outerIndexPtr() + m_outerStart; }

  inline const StorageIndex* innerNonZeroPtr() const {
    return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr() + m_outerStart);
  }
  inline StorageIndex* innerNonZeroPtr() { return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr() + m_outerStart); }

  bool isCompressed() const { return m_matrix.innerNonZeroPtr() == 0; }

  inline Scalar& coeffRef(Index row, Index col) {
    return m_matrix.coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
  }

  const Scalar& lastCoeff() const {
    EIGEN_STATIC_ASSERT_VECTOR_ONLY(sparse_matrix_block_impl);
    eigen_assert(Base::nonZeros() > 0);
    if (m_matrix.isCompressed())
      return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart + 1] - 1];
    else
      return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart] + m_matrix.innerNonZeroPtr()[m_outerStart] - 1];
  }

  EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

  inline const SparseMatrixType& nestedExpression() const { return m_matrix; }
  inline SparseMatrixType& nestedExpression() { return m_matrix; }
  Index startRow() const { return IsRowMajor ? m_outerStart : 0; }
  Index startCol() const { return IsRowMajor ? 0 : m_outerStart; }
  Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

 protected:
  typename internal::ref_selector<SparseMatrixType>::non_const_type m_matrix;
  Index m_outerStart;
  const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
};
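// Usage sketch (illustrative, assuming only the public SparseMatrix API):
// blocks of a plain SparseMatrix that span whole inner vectors are writable;
// the operator= above splices the new nonzeros into the compressed storage and
// shifts the outer index pointers of the trailing vectors when the block's
// nonzero count changes:
//
//   Eigen::SparseMatrix<double> A(8, 8), B(8, 2);
//   B.insert(0, 0) = 1.0;
//   B.insert(5, 1) = 2.0;
//   A.middleCols(3, 2) = B;  // columns 3..4 of column-major A replaced by B
//   // Columns outside [3, 5) keep their values; only the trailing outer
//   // index pointers move.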

}  // namespace internal

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true, Sparse>
    : public internal::sparse_matrix_block_impl<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols> {
 public:
  typedef StorageIndex_ StorageIndex;
  typedef SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType, BlockRows, BlockCols> Base;
  inline BlockImpl(SparseMatrixType& xpr, Index i) : Base(xpr, i) {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : Base(xpr, startRow, startCol, blockRows, blockCols) {}

  using Base::operator=;
};

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true, Sparse>
    : public internal::sparse_matrix_block_impl<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows,
                                                BlockCols> {
 public:
  typedef StorageIndex_ StorageIndex;
  typedef const SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType, BlockRows, BlockCols> Base;
  inline BlockImpl(SparseMatrixType& xpr, Index i) : Base(xpr, i) {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : Base(xpr, startRow, startCol, blockRows, blockCols) {}

  using Base::operator=;

 private:
  template <typename Derived>
  BlockImpl(const SparseMatrixBase<Derived>& xpr, Index i);
  template <typename Derived>
  BlockImpl(const SparseMatrixBase<Derived>& xpr);
};
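// Usage sketch (illustrative): the const specialization keeps direct access to
// the compressed arrays while the private constructors above reject
// construction from arbitrary sparse expressions:
//
//   const Eigen::SparseMatrix<double>& cA = A;
//   auto cols = cA.middleCols(3, 2);
//   const double* values = cols.valuePtr();  // aliases A's value array
//   const int* outer = cols.outerIndexPtr(); // == A.outerIndexPtr() + 3
//   // (StorageIndex is int by default, hence the int* above.)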

//----------

/** Generic implementation of sparse Block expression.
 * Read-only.
 */
template <typename XprType, int BlockRows, int BlockCols, bool InnerPanel>
class BlockImpl<XprType, BlockRows, BlockCols, InnerPanel, Sparse>
    : public SparseMatrixBase<Block<XprType, BlockRows, BlockCols, InnerPanel> >, internal::no_assignment_operator {
  typedef Block<XprType, BlockRows, BlockCols, InnerPanel> BlockType;
  typedef SparseMatrixBase<BlockType> Base;
  using Base::convert_index;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)

  typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;

  /** Column or Row constructor
   */
  inline BlockImpl(XprType& xpr, Index i)
      : m_matrix(xpr),
        m_startRow((BlockRows == 1) && (BlockCols == XprType::ColsAtCompileTime) ? convert_index(i) : 0),
        m_startCol((BlockRows == XprType::RowsAtCompileTime) && (BlockCols == 1) ? convert_index(i) : 0),
        m_blockRows(BlockRows == 1 ? 1 : xpr.rows()),
        m_blockCols(BlockCols == 1 ? 1 : xpr.cols()) {}

  /** Dynamic-size constructor
   */
  inline BlockImpl(XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr),
        m_startRow(convert_index(startRow)),
        m_startCol(convert_index(startCol)),
        m_blockRows(convert_index(blockRows)),
        m_blockCols(convert_index(blockCols)) {}

  inline Index rows() const { return m_blockRows.value(); }
  inline Index cols() const { return m_blockCols.value(); }

  inline Scalar& coeffRef(Index row, Index col) {
    return m_matrix.coeffRef(row + m_startRow.value(), col + m_startCol.value());
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value());
  }

  inline Scalar& coeffRef(Index index) {
    return m_matrix.coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                             m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                          m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
  }

  inline const XprType& nestedExpression() const { return m_matrix; }
  inline XprType& nestedExpression() { return m_matrix; }
  Index startRow() const { return m_startRow.value(); }
  Index startCol() const { return m_startCol.value(); }
  Index blockRows() const { return m_blockRows.value(); }
  Index blockCols() const { return m_blockCols.value(); }

 protected:
  // friend class internal::GenericSparseBlockInnerIteratorImpl<XprType,BlockRows,BlockCols,InnerPanel>;
  friend struct internal::unary_evaluator<Block<XprType, BlockRows, BlockCols, InnerPanel>, internal::IteratorBased,
                                          Scalar>;

  Index nonZeros() const { return Dynamic; }

  typename internal::ref_selector<XprType>::non_const_type m_matrix;
  const internal::variable_if_dynamic<Index, XprType::RowsAtCompileTime == 1 ? 0 : Dynamic> m_startRow;
  const internal::variable_if_dynamic<Index, XprType::ColsAtCompileTime == 1 ? 0 : Dynamic> m_startCol;
  const internal::variable_if_dynamic<Index, RowsAtCompileTime> m_blockRows;
  const internal::variable_if_dynamic<Index, ColsAtCompileTime> m_blockCols;

 protected:
  // Disable assignment with a clear error message.
  // Note that simply removing operator= yields compilation errors with ICC+MSVC.
  template <typename T>
  BlockImpl& operator=(const T&) {
    EIGEN_STATIC_ASSERT(sizeof(T) == 0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
    return *this;
  }
};
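// Usage sketch (illustrative): blocks that do not cover whole inner vectors
// fall back to this generic, read-only implementation; random access goes
// through coeff(), and nonZeros() is unknown (Dynamic) until traversal:
//
//   Eigen::SparseMatrix<double> A(10, 10);
//   A.insert(3, 4) = 5.0;
//   auto blk = A.block(2, 3, 4, 5);  // 4x5 block, not an inner panel
//   double x = blk.coeff(1, 1);      // forwards to A.coeff(3, 4) == 5.0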

namespace internal {

template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
struct unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>
    : public evaluator_base<Block<ArgType, BlockRows, BlockCols, InnerPanel> > {
  class InnerVectorInnerIterator;
  class OuterVectorInnerIterator;

 public:
  typedef Block<ArgType, BlockRows, BlockCols, InnerPanel> XprType;
  typedef typename XprType::StorageIndex StorageIndex;
  typedef typename XprType::Scalar Scalar;

  enum {
    IsRowMajor = XprType::IsRowMajor,
    OuterVector = (BlockCols == 1 && ArgType::IsRowMajor) || (BlockRows == 1 && !ArgType::IsRowMajor),
    CoeffReadCost = evaluator<ArgType>::CoeffReadCost,
    Flags = XprType::Flags
  };

  typedef std::conditional_t<OuterVector, OuterVectorInnerIterator, InnerVectorInnerIterator> InnerIterator;

  explicit unary_evaluator(const XprType& op) : m_argImpl(op.nestedExpression()), m_block(op) {}

  inline Index nonZerosEstimate() const {
    const Index nnz = m_block.nonZeros();
    if (nnz < 0) {
      // Scale the non-zero estimate for the underlying expression linearly with block size.
      // Return zero if the underlying block is empty.
      const Index nested_sz = m_block.nestedExpression().size();
      return nested_sz == 0 ? 0 : m_argImpl.nonZerosEstimate() * m_block.size() / nested_sz;
    }
    return nnz;
  }

 protected:
  typedef typename evaluator<ArgType>::InnerIterator EvalIterator;

  evaluator<ArgType> m_argImpl;
  const XprType& m_block;
};
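// Worked example (illustrative): if the nested expression is 100x100 with an
// estimated 500 nonzeros, a generic 10x100 block reports nonZeros() == Dynamic
// (negative), so the estimate above scales with relative size:
//   500 * (10 * 100) / (100 * 100) = 50 expected nonzeros.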

template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>::InnerVectorInnerIterator
    : public EvalIterator {
  // NOTE MSVC fails to compile if we don't explicitly "import" IsRowMajor from unary_evaluator
  // because the base class EvalIterator has a private IsRowMajor enum too. (bug #1786)
  // NOTE We cannot call it IsRowMajor because it would shadow unary_evaluator::IsRowMajor
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const XprType& m_block;
  Index m_end;

 public:
  EIGEN_STRONG_INLINE InnerVectorInnerIterator(const unary_evaluator& aEval, Index outer)
      : EvalIterator(aEval.m_argImpl, outer + (XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol())),
        m_block(aEval.m_block),
        m_end(XprIsRowMajor ? aEval.m_block.startCol() + aEval.m_block.blockCols()
                            : aEval.m_block.startRow() + aEval.m_block.blockRows()) {
    while ((EvalIterator::operator bool()) &&
           (EvalIterator::index() < (XprIsRowMajor ? m_block.startCol() : m_block.startRow())))
      EvalIterator::operator++();
  }

  inline StorageIndex index() const {
    return EvalIterator::index() - convert_index<StorageIndex>(XprIsRowMajor ? m_block.startCol() : m_block.startRow());
  }
  inline Index outer() const { return EvalIterator::outer() - (XprIsRowMajor ? m_block.startRow() : m_block.startCol()); }
  inline Index row() const { return EvalIterator::row() - m_block.startRow(); }
  inline Index col() const { return EvalIterator::col() - m_block.startCol(); }

  inline operator bool() const { return EvalIterator::operator bool() && EvalIterator::index() < m_end; }
};
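// Usage sketch (illustrative): iterating an inner-panel block of a generic
// expression visits only the entries inside the block; index()/row()/col()
// above shift the underlying iterator's coordinates into block-local ones:
//
//   const auto expr = A * 2.0;         // generic sparse expression over A
//   auto blk = expr.middleCols(3, 2);  // column-major: inner-panel block
//   double sum = 0;
//   for (Eigen::Index j = 0; j < blk.outerSize(); ++j)
//     for (Eigen::InnerIterator<decltype(blk)> it(blk, j); it; ++it)
//       sum += it.value();             // it.col() is block-local: 0 or 1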

template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>::OuterVectorInnerIterator {
  // NOTE see above
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const unary_evaluator& m_eval;
  Index m_outerPos;
  const Index m_innerIndex;
  Index m_end;
  EvalIterator m_it;

 public:
  EIGEN_STRONG_INLINE OuterVectorInnerIterator(const unary_evaluator& aEval, Index outer)
      : m_eval(aEval),
        m_outerPos((XprIsRowMajor ? aEval.m_block.startCol() : aEval.m_block.startRow())),
        m_innerIndex(XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol()),
        m_end(XprIsRowMajor ? aEval.m_block.startCol() + aEval.m_block.blockCols()
                            : aEval.m_block.startRow() + aEval.m_block.blockRows()),
        m_it(m_eval.m_argImpl, m_outerPos) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);

    while (m_it && m_it.index() < m_innerIndex) ++m_it;
    if ((!m_it) || (m_it.index() != m_innerIndex)) ++(*this);
  }

  inline StorageIndex index() const {
    return convert_index<StorageIndex>(m_outerPos -
                                       (XprIsRowMajor ? m_eval.m_block.startCol() : m_eval.m_block.startRow()));
  }
  inline Index outer() const { return 0; }
  inline Index row() const { return XprIsRowMajor ? 0 : index(); }
  inline Index col() const { return XprIsRowMajor ? index() : 0; }

  inline Scalar value() const { return m_it.value(); }
  inline Scalar& valueRef() { return m_it.valueRef(); }

  inline OuterVectorInnerIterator& operator++() {
    // search next non-zero entry
    while (++m_outerPos < m_end) {
      // Restart iterator at the next inner-vector:
      internal::destroy_at(&m_it);
      internal::construct_at(&m_it, m_eval.m_argImpl, m_outerPos);
      // search for the key m_innerIndex in the current outer-vector
      while (m_it && m_it.index() < m_innerIndex) ++m_it;
      if (m_it && m_it.index() == m_innerIndex) break;
    }
    return *this;
  }

  inline operator bool() const { return m_outerPos < m_end; }
};
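// Cost sketch (illustrative): an outer-vector block crosses the storage order,
// e.g. a single row of a column-major matrix. Each operator++ above rebuilds
// the underlying iterator on the next inner vector and scans it for
// m_innerIndex, so traversal costs roughly one search per spanned inner vector
// rather than one step per nonzero of the row:
//
//   Eigen::SparseMatrix<double> A(1000, 1000);  // column-major
//   auto r = A.row(7);                          // outer-vector block
//   for (Eigen::InnerIterator<decltype(r)> it(r, 0); it; ++it)
//     ;                                         // probes every spanned column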

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true>, IteratorBased>
    : evaluator<
          SparseCompressedBase<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> > > {
  typedef Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType& xpr) : Base(xpr) {}
};

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true>,
                       IteratorBased>
    : evaluator<SparseCompressedBase<
          Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> > > {
  typedef Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType& xpr) : Base(xpr) {}
};

}  // end namespace internal

}  // end namespace Eigen

#endif  // EIGEN_SPARSE_BLOCK_H