#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H

#include "./InternalHeaderCheck.h"

namespace Eigen {

namespace internal {

/** \internal
 * Hybrid sparse/dense vector class designed for intensive read-write operations.
 *
 * See BasicSparseLLT and SparseProduct for usage examples.
 */
template <typename Scalar_, typename StorageIndex_>
class AmbiVector {
 public:
  typedef Scalar_ Scalar;
  typedef StorageIndex_ StorageIndex;
  typedef typename NumTraits<Scalar>::Real RealScalar;

  explicit AmbiVector(Index size)
      : m_buffer(0), m_zero(0), m_size(0), m_end(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1) {
    resize(size);
  }

  void init(double estimatedDensity);
  void init(int mode);

  Index nonZeros() const;

  /** Specifies a sub-vector to work on */
  void setBounds(Index start, Index end) {
    m_start = convert_index(start);
    m_end = convert_index(end);
  }

  void setZero();

  void restart();

  Scalar& coeffRef(Index i);
  Scalar& coeff(Index i);

  class Iterator;

  ~AmbiVector() { delete[] m_buffer; }

  void resize(Index size) {
    if (m_allocatedSize < size) reallocate(size);
    m_size = convert_index(size);
  }

  StorageIndex size() const { return m_size; }

 protected:
  StorageIndex convert_index(Index idx) { return internal::convert_index<StorageIndex>(idx); }

  void reallocate(Index size) {
    // if the size of the matrix is not too large, let's allocate a bit more than needed such
    // that we can handle dense vector even in sparse mode.
    delete[] m_buffer;
    if (size < 1000) {
      Index allocSize = (size * sizeof(ListEl) + sizeof(Scalar) - 1) / sizeof(Scalar);
      m_allocatedElements = convert_index((allocSize * sizeof(Scalar)) / sizeof(ListEl));
      m_buffer = new Scalar[allocSize];
    } else {
      m_allocatedElements = convert_index((size * sizeof(Scalar)) / sizeof(ListEl));
      m_buffer = new Scalar[size];
    }
    m_size = convert_index(size);
    m_start = 0;
    m_end = m_size;
  }
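
  // Worked example (illustration only, not part of the original header): with
  // Scalar = double (8 bytes) and StorageIndex = int (4 bytes), sizeof(ListEl)
  // is 16 on typical platforms, so reallocate(100) takes the small-size branch
  // and allocates allocSize = (100 * 16 + 8 - 1) / 8 = 200 Scalars -- room for
  // a dense vector of 100 coefficients or (200 * 8) / 16 = 100 list elements.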

  void reallocateSparse() {
    Index copyElements = m_allocatedElements;
    m_allocatedElements = (std::min)(StorageIndex(m_allocatedElements * 1.5), m_size);
    Index allocSize = m_allocatedElements * sizeof(ListEl);
    allocSize = (allocSize + sizeof(Scalar) - 1) / sizeof(Scalar);
    Scalar* newBuffer = new Scalar[allocSize];
    std::memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
    delete[] m_buffer;
    m_buffer = newBuffer;
  }

 protected:
  // element type of the linked list
  struct ListEl {
    StorageIndex next;
    StorageIndex index;
    Scalar value;
  };

  // used to store data in both modes
  Scalar* m_buffer;
  Scalar m_zero;
  StorageIndex m_size;
  StorageIndex m_start;
  StorageIndex m_end;
  StorageIndex m_allocatedSize;
  StorageIndex m_allocatedElements;
  StorageIndex m_mode;

  // linked-list (sparse) mode
  StorageIndex m_llStart;
  StorageIndex m_llCurrent;
  StorageIndex m_llSize;
};
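
// A minimal usage sketch (illustration only, not part of the original header),
// assuming IsDense/IsSparse are the mode constants used by Eigen's SparseCore:
//
//   internal::AmbiVector<double, int> acc(1000);
//   acc.init(0.01);          // low estimated density selects linked-list (sparse) mode
//   acc.setBounds(0, 1000);
//   acc.setZero();
//   acc.restart();
//   acc.coeffRef(3) += 1.5;  // indices must be non-decreasing between restart() calls
//   acc.coeffRef(42) += 2.0;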

/** \returns the number of non zeros in the current sub vector */
template <typename Scalar_, typename StorageIndex_>
Index AmbiVector<Scalar_, StorageIndex_>::nonZeros() const {
  if (m_mode == IsSparse)
    return m_llSize;
  else
    return m_end - m_start;
}

template <typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_, StorageIndex_>::init(double estimatedDensity) {
  if (estimatedDensity > 0.1)
    init(IsDense);
  else
    init(IsSparse);
}

template <typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_, StorageIndex_>::init(int mode) {
  m_mode = mode;
  // These are only needed in sparse mode, but we set them unconditionally
  // to avoid some maybe-uninitialized warnings.
  m_llSize = 0;
  m_llStart = -1;
}

/** Must be called whenever we might perform a write access with an index smaller than the previous one.
 *
 * Don't worry, this function is extremely cheap.
 */
template <typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_, StorageIndex_>::restart() {
  m_llCurrent = m_llStart;
}

/** Sets all coefficients of the current subvector to zero */
template <typename Scalar_, typename StorageIndex_>
void AmbiVector<Scalar_, StorageIndex_>::setZero() {
  if (m_mode == IsDense) {
    for (Index i = m_start; i < m_end; ++i) m_buffer[i] = Scalar(0);
  } else {
    eigen_assert(m_mode == IsSparse);
    m_llSize = 0;
    m_llStart = -1;
  }
}

template <typename Scalar_, typename StorageIndex_>
Scalar_& AmbiVector<Scalar_, StorageIndex_>::coeffRef(Index i) {
  if (m_mode == IsDense)
    return m_buffer[i];
  else {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode == IsSparse);
    if (m_llSize == 0) {
      // this is the first element
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = convert_index(i);
      llElements[0].next = -1;
      return llElements[0].value;
    } else if (i < llElements[m_llStart].index) {
      // this is going to be the new first element of the list
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = convert_index(i);
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    } else {
      StorageIndex nextel = llElements[m_llCurrent].next;
      eigen_assert(i >= llElements[m_llCurrent].index &&
                   "you must call restart() before inserting an element with lower or equal index");
      while (nextel >= 0 && llElements[nextel].index <= i) {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }

      if (llElements[m_llCurrent].index == i) {
        // the coefficient already exists and we found it!
        return llElements[m_llCurrent].value;
      } else {
        if (m_llSize >= m_allocatedElements) {
          reallocateSparse();
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        eigen_internal_assert(m_llSize < m_allocatedElements && "internal error: overflow in sparse mode");
        // let's insert a new coefficient
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = convert_index(i);
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
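
// A hedged illustration (not from the original file) of the ordering contract
// the assertion above enforces: in sparse mode the search resumes from
// m_llCurrent, so successive writes must target non-decreasing indices unless
// restart() is called first. Here v denotes a hypothetical AmbiVector in
// sparse mode:
//
//   v.restart();
//   v.coeffRef(2) = 1;  // ok
//   v.coeffRef(9) = 2;  // ok, index increased
//   v.restart();        // required before revisiting a lower index
//   v.coeffRef(5) = 3;  // ok again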

template <typename Scalar_, typename StorageIndex_>
Scalar_& AmbiVector<Scalar_, StorageIndex_>::coeff(Index i) {
  if (m_mode == IsDense)
    return m_buffer[i];
  else {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode == IsSparse);
    if ((m_llSize == 0) || (i < llElements[m_llStart].index)) {
      return m_zero;
    } else {
      Index elid = m_llStart;
      while (elid >= 0 && llElements[elid].index < i) elid = llElements[elid].next;

      if (elid >= 0 && llElements[elid].index == i)
        return llElements[elid].value;
      else
        return m_zero;
    }
  }
}

/** \class AmbiVector::Iterator
 * \brief Iterator over the nonzero coefficients */
template <typename Scalar_, typename StorageIndex_>
class AmbiVector<Scalar_, StorageIndex_>::Iterator {
 public:
  typedef Scalar_ Scalar;
  typedef typename NumTraits<Scalar>::Real RealScalar;

  /** Default constructor
   * \param vec the vector on which we iterate
   * \param epsilon the minimal magnitude below which coefficients are skipped
   */
  explicit Iterator(const AmbiVector& vec, const RealScalar& epsilon = 0) : m_vector(vec) {
    using std::abs;
    m_epsilon = epsilon;
    m_isDense = m_vector.m_mode == IsDense;
    if (m_isDense) {
      m_currentEl = 0;    // this is to avoid a compilation warning
      m_cachedValue = 0;  // this is to avoid a compilation warning
      m_cachedIndex = m_vector.m_start - 1;
      ++(*this);
    } else {
      ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
      m_currentEl = m_vector.m_llStart;
      while (m_currentEl >= 0 && abs(llElements[m_currentEl].value) <= m_epsilon)
        m_currentEl = llElements[m_currentEl].next;
      if (m_currentEl < 0) {
        m_cachedValue = 0;  // this is to avoid a compilation warning
        m_cachedIndex = -1;
      } else {
        m_cachedIndex = llElements[m_currentEl].index;
        m_cachedValue = llElements[m_currentEl].value;
      }
    }
  }

  StorageIndex index() const { return m_cachedIndex; }
  Scalar value() const { return m_cachedValue; }

  operator bool() const { return m_cachedIndex >= 0; }

  Iterator& operator++() {
    using std::abs;
    if (m_isDense) {
      do {
        ++m_cachedIndex;
      } while (m_cachedIndex < m_vector.m_end && abs(m_vector.m_buffer[m_cachedIndex]) <= m_epsilon);
      if (m_cachedIndex < m_vector.m_end)
        m_cachedValue = m_vector.m_buffer[m_cachedIndex];
      else
        m_cachedIndex = -1;
    } else {
      ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
      do {
        m_currentEl = llElements[m_currentEl].next;
      } while (m_currentEl >= 0 && abs(llElements[m_currentEl].value) <= m_epsilon);
      if (m_currentEl < 0) {
        m_cachedIndex = -1;
      } else {
        m_cachedIndex = llElements[m_currentEl].index;
        m_cachedValue = llElements[m_currentEl].value;
      }
    }
    return *this;
  }

 protected:
  const AmbiVector& m_vector;  // the target vector
  StorageIndex m_currentEl;    // the current element in sparse/linked-list mode
  RealScalar m_epsilon;        // epsilon used to prune zero coefficients
  StorageIndex m_cachedIndex;  // current coordinate
  Scalar m_cachedValue;        // current value
  bool m_isDense;              // mode of the vector
};
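
// A minimal iteration sketch (illustration only, not part of the original
// header): the epsilon argument prunes coefficients of magnitude <= epsilon in
// both modes. acc is a hypothetical, already-filled AmbiVector as above:
//
//   for (internal::AmbiVector<double, int>::Iterator it(acc, 1e-12); it; ++it)
//     std::cout << it.index() << " -> " << it.value() << "\n";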

}  // end namespace internal

}  // end namespace Eigen

#endif  // EIGEN_AMBIVECTOR_H