#ifndef EIGEN_SPARSE_BLOCK_H
#define EIGEN_SPARSE_BLOCK_H

#include "./InternalHeaderCheck.h"
template <typename XprType, int BlockRows, int BlockCols>
class BlockImpl<XprType, BlockRows, BlockCols, true, Sparse>
    : public SparseMatrixBase<Block<XprType, BlockRows, BlockCols, true> > {
  typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;
  typedef Block<XprType, BlockRows, BlockCols, true> BlockType;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };

 protected:
  enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
  typedef SparseMatrixBase<BlockType> Base;
  using Base::convert_index;

 public:
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
  inline BlockImpl(XprType& xpr, Index i) : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize) {}
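  // Illustrative note (sketch, not part of the original header): this constructor backs
  // single inner-vector views such as `A.col(j)` or `A.innerVector(j)` on a column-major
  // sparse expression, where `A` and `j` are placeholder names.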
  inline BlockImpl(XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr),
        m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols)) {}
  EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
  Index nonZeros() const {
    typedef internal::evaluator<XprType> EvaluatorType;
    EvaluatorType matEval(m_matrix);
    Index nnz = 0;
    Index end = m_outerStart + m_outerSize.value();
    for (Index j = m_outerStart; j < end; ++j)
      for (typename EvaluatorType::InnerIterator it(matEval, j); it; ++it) ++nnz;
    return nnz;
  }
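  // Note (added for exposition): unlike SparseMatrix::nonZeros(), which reads the stored index
  // counters, this generic block has to iterate over every covered outer vector of the nested
  // expression, so it costs on the order of the number of stored entries in the covered panel.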
  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
  }
  inline const XprType& nestedExpression() const { return m_matrix; }
  inline XprType& nestedExpression() { return m_matrix; }
  Index startRow() const { return IsRowMajor ? m_outerStart : 0; }
  Index startCol() const { return IsRowMajor ? 0 : m_outerStart; }
  Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
 protected:
  typename internal::ref_selector<XprType>::non_const_type m_matrix;
  Index m_outerStart;
  const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
  // Disable assignment with a clear error message: this generic sparse block is read-only.
  template <typename T>
  BlockImpl& operator=(const T&) {
    EIGEN_STATIC_ASSERT(sizeof(T) == 0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
    return *this;
  }
};
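// Illustrative note (assumed example with placeholder names, not part of the original header):
// blocks of arbitrary sparse *expressions* are read-only and trip the static assertion above on
// assignment; only inner-panel blocks of an actual SparseMatrix (see the specializations below)
// support writing, e.g.
//   SparseMatrix<double> A(rows, cols);
//   A.middleCols(j, n) = some_sparse_expression;   // OK: handled by sparse_matrix_block_impl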
template <typename SparseMatrixType, int BlockRows, int BlockCols>
class sparse_matrix_block_impl : public SparseCompressedBase<Block<SparseMatrixType, BlockRows, BlockCols, true> > {
  typedef internal::remove_all_t<typename SparseMatrixType::Nested> MatrixTypeNested_;
  typedef Block<SparseMatrixType, BlockRows, BlockCols, true> BlockType;
  typedef SparseCompressedBase<BlockType> Base;
  using Base::convert_index;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)

  typedef typename Base::IndexVector IndexVector;
  enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
  inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index i)
      : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize) {}
  inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows,
                                  Index blockCols)
      : m_matrix(xpr),
        m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols)) {}
  template <typename OtherDerived>
  inline BlockType& operator=(const SparseMatrixBase<OtherDerived>& other) {
    typedef internal::remove_all_t<typename SparseMatrixType::Nested> NestedMatrixType_;
    NestedMatrixType_& matrix = m_matrix;
    // This assignment is slow if this inner panel is not empty
    // and/or it is not at the end of the non-zeros of the underlying matrix.

    // 1 - evaluate to a temporary to avoid transposition and/or aliasing issues
    Ref<const SparseMatrix<Scalar, IsRowMajor ? RowMajor : ColMajor, StorageIndex> > tmp(other.derived());
    eigen_internal_assert(tmp.outerSize() == m_outerSize.value());

    // 2 - check whether there is enough allocated memory
    Index nnz = tmp.nonZeros();
    Index start = m_outerStart == 0 ? 0 : m_matrix.outerIndexPtr()[m_outerStart];  // start of the current block
    Index end = m_matrix.outerIndexPtr()[m_outerStart + m_outerSize.value()];      // end of the current block
    Index block_size = end - start;                                                // available room in the current block
    Index tail_size = m_matrix.outerIndexPtr()[m_matrix.outerSize()] - end;

    Index free_size = m_matrix.isCompressed() ? Index(matrix.data().allocatedSize()) + block_size : block_size;

    Index tmp_start = tmp.outerIndexPtr()[0];

    bool update_trailing_pointers = false;
    if (nnz > free_size) {
      // realloc manually to reduce copies
      typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);

      internal::smart_copy(m_matrix.valuePtr(), m_matrix.valuePtr() + start, newdata.valuePtr());
      internal::smart_copy(m_matrix.innerIndexPtr(), m_matrix.innerIndexPtr() + start, newdata.indexPtr());

      internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, newdata.valuePtr() + start);
      internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz,
                           newdata.indexPtr() + start);

      internal::smart_copy(matrix.valuePtr() + end, matrix.valuePtr() + end + tail_size,
                           newdata.valuePtr() + start + nnz);
      internal::smart_copy(matrix.innerIndexPtr() + end, matrix.innerIndexPtr() + end + tail_size,
                           newdata.indexPtr() + start + nnz);

      newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);

      matrix.data().swap(newdata);

      update_trailing_pointers = true;
    } else {
      if (m_matrix.isCompressed() && nnz != block_size) {
        // no need to realloc, simply move the tail to its new position and insert tmp
        matrix.data().resize(start + nnz + tail_size);

        internal::smart_memmove(matrix.valuePtr() + end, matrix.valuePtr() + end + tail_size,
                                matrix.valuePtr() + start + nnz);
        internal::smart_memmove(matrix.innerIndexPtr() + end, matrix.innerIndexPtr() + end + tail_size,
                                matrix.innerIndexPtr() + start + nnz);

        update_trailing_pointers = true;
      }

      internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, matrix.valuePtr() + start);
      internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz,
                           matrix.innerIndexPtr() + start);
    }

    // 3 - update the outer index pointers and, if uncompressed, the inner non-zero counts
    if (IsVectorAtCompileTime) {
      if (!m_matrix.isCompressed()) matrix.innerNonZeroPtr()[m_outerStart] = StorageIndex(nnz);
      matrix.outerIndexPtr()[m_outerStart] = StorageIndex(start);
    } else {
      StorageIndex p = StorageIndex(start);
      for (Index k = 0; k < m_outerSize.value(); ++k) {
        StorageIndex nnz_k = internal::convert_index<StorageIndex>(tmp.innerVector(k).nonZeros());
        if (!m_matrix.isCompressed()) matrix.innerNonZeroPtr()[m_outerStart + k] = nnz_k;
        matrix.outerIndexPtr()[m_outerStart + k] = p;
        p += nnz_k;
      }
    }

    // 4 - shift the outer index pointers of the outer vectors following the block
    if (update_trailing_pointers) {
      StorageIndex offset = internal::convert_index<StorageIndex>(nnz - block_size);
      for (Index k = m_outerStart + m_outerSize.value(); k <= matrix.outerSize(); ++k) {
        matrix.outerIndexPtr()[k] += offset;
      }
    }

    return derived();
  }
  inline BlockType& operator=(const BlockType& other) { return operator= <BlockType>(other); }
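  // Illustrative usage (assumed example with placeholder names): for a column-major SparseMatrix
  // A, a set of whole columns forms an inner panel, so the assignment below goes through the
  // operator= above and splices B's compressed storage into A's value/index arrays.
  //   SparseMatrix<double> A(n, n), B(n, 3);
  //   /* ... fill A and B ... */
  //   A.middleCols(j, 3) = B;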
  inline const Scalar* valuePtr() const { return m_matrix.valuePtr(); }
  inline Scalar* valuePtr() { return m_matrix.valuePtr(); }

  inline const StorageIndex* innerIndexPtr() const { return m_matrix.innerIndexPtr(); }
  inline StorageIndex* innerIndexPtr() { return m_matrix.innerIndexPtr(); }

  inline const StorageIndex* outerIndexPtr() const { return m_matrix.outerIndexPtr() + m_outerStart; }
  inline StorageIndex* outerIndexPtr() { return m_matrix.outerIndexPtr() + m_outerStart; }
  inline const StorageIndex* innerNonZeroPtr() const {
    return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr() + m_outerStart);
  }
  inline StorageIndex* innerNonZeroPtr() { return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr() + m_outerStart); }
  bool isCompressed() const { return m_matrix.innerNonZeroPtr() == 0; }

  inline Scalar& coeffRef(Index row, Index col) {
    return m_matrix.coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
  }
  const Scalar& lastCoeff() const {
    EIGEN_STATIC_ASSERT_VECTOR_ONLY(sparse_matrix_block_impl);
    eigen_assert(Base::nonZeros() > 0);
    if (m_matrix.isCompressed())
      return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart + 1] - 1];
    else
      return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart] + m_matrix.innerNonZeroPtr()[m_outerStart] - 1];
  }
  EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
  inline const SparseMatrixType& nestedExpression() const { return m_matrix; }
  inline SparseMatrixType& nestedExpression() { return m_matrix; }
  Index startRow() const { return IsRowMajor ? m_outerStart : 0; }
  Index startCol() const { return IsRowMajor ? 0 : m_outerStart; }
  Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
 protected:
  typename internal::ref_selector<SparseMatrixType>::non_const_type m_matrix;
  Index m_outerStart;
  const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
};
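// Illustrative note (assumed example with placeholder names): because this implementation exposes
// the underlying matrix's own compressed arrays, an inner-panel block can be handed to code that
// expects raw CSC/CSR-style pointers, e.g.
//   SparseMatrix<double> A(rows, cols);
//   auto blk = A.middleCols(j, n);
//   const double* vals  = blk.valuePtr();        // same array as A.valuePtr()
//   const auto*   outer = blk.outerIndexPtr();   // A.outerIndexPtr() + j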
template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true, Sparse>
    : public internal::sparse_matrix_block_impl<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols> {
 public:
  typedef StorageIndex_ StorageIndex;
  typedef SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType, BlockRows, BlockCols> Base;
  inline BlockImpl(SparseMatrixType& xpr, Index i) : Base(xpr, i) {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : Base(xpr, startRow, startCol, blockRows, blockCols) {}

  using Base::operator=;
};
template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true, Sparse>
    : public internal::sparse_matrix_block_impl<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows,
                                                BlockCols> {
 public:
  typedef StorageIndex_ StorageIndex;
  typedef const SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType, BlockRows, BlockCols> Base;
  inline BlockImpl(SparseMatrixType& xpr, Index i) : Base(xpr, i) {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : Base(xpr, startRow, startCol, blockRows, blockCols) {}

  using Base::operator=;
 private:
  // Constructors from a generic sparse expression are declared but intentionally not defined:
  // only blocks of an actual SparseMatrix are supported by this specialization.
  template <typename Derived>
  BlockImpl(const SparseMatrixBase<Derived>& xpr, Index i);
  template <typename Derived>
  BlockImpl(const SparseMatrixBase<Derived>& xpr);
};
template <typename XprType, int BlockRows, int BlockCols, bool InnerPanel>
class BlockImpl<XprType, BlockRows, BlockCols, InnerPanel, Sparse>
    : public SparseMatrixBase<Block<XprType, BlockRows, BlockCols, InnerPanel> >, internal::no_assignment_operator {
  typedef Block<XprType, BlockRows, BlockCols, InnerPanel> BlockType;
  typedef SparseMatrixBase<BlockType> Base;
  using Base::convert_index;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)

  typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;
  /** Column or Row constructor */
  inline BlockImpl(XprType& xpr, Index i)
      : m_matrix(xpr),
        m_startRow((BlockRows == 1) && (BlockCols == XprType::ColsAtCompileTime) ? convert_index(i) : 0),
        m_startCol((BlockRows == XprType::RowsAtCompileTime) && (BlockCols == 1) ? convert_index(i) : 0),
        m_blockRows(BlockRows == 1 ? 1 : xpr.rows()),
        m_blockCols(BlockCols == 1 ? 1 : xpr.cols()) {}
  /** Dynamic-size constructor */
  inline BlockImpl(XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr),
        m_startRow(convert_index(startRow)),
        m_startCol(convert_index(startCol)),
        m_blockRows(convert_index(blockRows)),
        m_blockCols(convert_index(blockCols)) {}
  inline Index rows() const { return m_blockRows.value(); }
  inline Index cols() const { return m_blockCols.value(); }

  inline Scalar& coeffRef(Index row, Index col) {
    return m_matrix.coeffRef(row + m_startRow.value(), col + m_startCol.value());
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value());
  }

  inline Scalar& coeffRef(Index index) {
    return m_matrix.coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                             m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                          m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
  }
  inline const XprType& nestedExpression() const { return m_matrix; }
  inline XprType& nestedExpression() { return m_matrix; }
  Index startRow() const { return m_startRow.value(); }
  Index startCol() const { return m_startCol.value(); }
  Index blockRows() const { return m_blockRows.value(); }
  Index blockCols() const { return m_blockCols.value(); }
 protected:
  friend struct internal::unary_evaluator<Block<XprType, BlockRows, BlockCols, InnerPanel>, internal::IteratorBased,
                                          Scalar>;

  Index nonZeros() const { return Dynamic; }
  typename internal::ref_selector<XprType>::non_const_type m_matrix;
  const internal::variable_if_dynamic<Index, XprType::RowsAtCompileTime == 1 ? 0 : Dynamic> m_startRow;
  const internal::variable_if_dynamic<Index, XprType::ColsAtCompileTime == 1 ? 0 : Dynamic> m_startCol;
  const internal::variable_if_dynamic<Index, RowsAtCompileTime> m_blockRows;
  const internal::variable_if_dynamic<Index, ColsAtCompileTime> m_blockCols;
  // Disable assignment with a clear error message: this generic sparse block is read-only.
  template <typename T>
  BlockImpl& operator=(const T&) {
    EIGEN_STATIC_ASSERT(sizeof(T) == 0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
    return *this;
  }
};
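// Illustrative note (assumed example with placeholder names): arbitrary rectangular blocks of a
// sparse matrix are therefore read-only, while whole inner panels remain writable.
//   SparseMatrix<double> A(rows, cols);           // column-major by default
//   double s = A.block(1, 1, 2, 2).sum();         // OK: read-only access
//   A.middleCols(1, 2) = other;                   // OK: inner panel (whole columns)
//   // A.block(1, 1, 2, 2) = other;               // does not compile: read-only sub-expression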
template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
struct unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>
    : public evaluator_base<Block<ArgType, BlockRows, BlockCols, InnerPanel> > {
  class InnerVectorInnerIterator;
  class OuterVectorInnerIterator;

 public:
  typedef Block<ArgType, BlockRows, BlockCols, InnerPanel> XprType;
  typedef typename XprType::StorageIndex StorageIndex;
  typedef typename XprType::Scalar Scalar;

  enum {
    IsRowMajor = XprType::IsRowMajor,
    // An "outer vector" block spans a single inner coefficient of every outer vector
    // (e.g. one row of a column-major matrix): it cannot reuse the argument's inner iterator.
    OuterVector = (BlockCols == 1 && ArgType::IsRowMajor) || (BlockRows == 1 && !ArgType::IsRowMajor),
    CoeffReadCost = evaluator<ArgType>::CoeffReadCost,
    Flags = XprType::Flags
  };

  typedef std::conditional_t<OuterVector, OuterVectorInnerIterator, InnerVectorInnerIterator> InnerIterator;

  explicit unary_evaluator(const XprType& op) : m_argImpl(op.nestedExpression()), m_block(op) {}
  inline Index nonZerosEstimate() const {
    const Index nnz = m_block.nonZeros();
    if (nnz < 0) {
      // The block does not know its number of non-zeros (it reported Dynamic):
      // estimate it from the density of the nested expression.
      const Index nested_sz = m_block.nestedExpression().size();
      return nested_sz == 0 ? 0 : m_argImpl.nonZerosEstimate() * m_block.size() / nested_sz;
    }
    return nnz;
  }
 protected:
  typedef typename evaluator<ArgType>::InnerIterator EvalIterator;

  evaluator<ArgType> m_argImpl;
  const XprType& m_block;
};
template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>::InnerVectorInnerIterator
    : public EvalIterator {
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const XprType& m_block;
  Index m_end;

 public:
  EIGEN_STRONG_INLINE InnerVectorInnerIterator(const unary_evaluator& aEval, Index outer)
      : EvalIterator(aEval.m_argImpl, outer + (XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol())),
        m_block(aEval.m_block),
        m_end(XprIsRowMajor ? aEval.m_block.startCol() + aEval.m_block.blockCols()
                            : aEval.m_block.startRow() + aEval.m_block.blockRows()) {
    // Skip the entries of the underlying inner vector that lie before the block's inner range.
    while ((EvalIterator::operator bool()) &&
           (EvalIterator::index() < (XprIsRowMajor ? m_block.startCol() : m_block.startRow())))
      EvalIterator::operator++();
  }
  inline StorageIndex index() const {
    return EvalIterator::index() - convert_index<StorageIndex>(XprIsRowMajor ? m_block.startCol() : m_block.startRow());
  }
  inline Index outer() const {
    return EvalIterator::outer() - (XprIsRowMajor ? m_block.startRow() : m_block.startCol());
  }
  inline Index row() const { return EvalIterator::row() - m_block.startRow(); }
  inline Index col() const { return EvalIterator::col() - m_block.startCol(); }

  inline operator bool() const { return EvalIterator::operator bool() && EvalIterator::index() < m_end; }
};
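// Illustrative usage (sketch with placeholder names): Eigen::InnerIterator dispatches to the
// iterator defined above for sparse block expressions, with index()/row()/col() reported relative
// to the block's origin.
//   SparseMatrix<double> A(rows, cols);
//   auto blk = A.block(i0, j0, p, q);
//   for (Eigen::InnerIterator<decltype(blk)> it(blk, k); it; ++it)   // k-th column of the block
//     std::cout << it.row() << " " << it.col() << " " << it.value() << "\n";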
template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>::OuterVectorInnerIterator {
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const unary_evaluator& m_eval;
  Index m_outerPos;
  const Index m_innerIndex;
  const Index m_end;
  EvalIterator m_it;

 public:
  EIGEN_STRONG_INLINE OuterVectorInnerIterator(const unary_evaluator& aEval, Index outer)
      : m_eval(aEval),
        m_outerPos((XprIsRowMajor ? aEval.m_block.startCol() : aEval.m_block.startRow())),
        m_innerIndex(XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol()),
        m_end(XprIsRowMajor ? aEval.m_block.startCol() + aEval.m_block.blockCols()
                            : aEval.m_block.startRow() + aEval.m_block.blockRows()),
        m_it(m_eval.m_argImpl, m_outerPos) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);

    // Position the iterator on the wanted inner index of the first outer vector,
    // or advance to the first outer vector that actually stores it.
    while (m_it && m_it.index() < m_innerIndex) ++m_it;
    if ((!m_it) || (m_it.index() != m_innerIndex)) ++(*this);
  }
  inline StorageIndex index() const {
    return convert_index<StorageIndex>(m_outerPos -
                                       (XprIsRowMajor ? m_eval.m_block.startCol() : m_eval.m_block.startRow()));
  }
  inline Index outer() const { return 0; }
  inline Index row() const { return XprIsRowMajor ? 0 : index(); }
  inline Index col() const { return XprIsRowMajor ? index() : 0; }

  inline Scalar value() const { return m_it.value(); }
  inline Scalar& valueRef() { return m_it.valueRef(); }
  inline OuterVectorInnerIterator& operator++() {
    // search for the next non-zero entry
    while (++m_outerPos < m_end) {
      // restart the nested iterator at the next outer vector
      internal::destroy_at(&m_it);
      internal::construct_at(&m_it, m_eval.m_argImpl, m_outerPos);
      // look for the wanted inner index in the current outer vector
      while (m_it && m_it.index() < m_innerIndex) ++m_it;
      if (m_it && m_it.index() == m_innerIndex) break;
    }
    return *this;
  }
  inline operator bool() const { return m_outerPos < m_end; }
};
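// Note (added for exposition): an "outer vector" block, e.g. a single row of a column-major sparse
// matrix, has no contiguous storage to walk, so each increment above re-scans the next outer
// vector for the single wanted inner index. Iterating such a block is therefore much more
// expensive than iterating an inner panel; if whole-row access dominates, a row-major matrix
// (or a transposed view) is usually the better fit.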
template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true>, IteratorBased>
    : evaluator<SparseCompressedBase<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> > > {
  typedef Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType& xpr) : Base(xpr) {}
};
template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true>,
                       IteratorBased>
    : evaluator<SparseCompressedBase<
          Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> > > {
  typedef Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType& xpr) : Base(xpr) {}
};
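// Inner-panel blocks of a SparseMatrix expose the matrix's own compressed storage (see
// sparse_matrix_block_impl above), so the two specializations above can simply reuse the
// evaluator of SparseCompressedBase instead of the generic block iterators.

#endif  // EIGEN_SPARSE_BLOCK_H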