Eigen  3.4.90 (git rev 5a9f66fb35d03a4da9ef8976e67a61b30aa16dcf)
 
CompressedStorage.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

// IWYU pragma: private
#include "./InternalHeaderCheck.h"

namespace Eigen {

namespace internal {
template <typename Scalar_, typename StorageIndex_>
class CompressedStorage {
 public:
  typedef Scalar_ Scalar;
  typedef StorageIndex_ StorageIndex;

 protected:
  typedef typename NumTraits<Scalar>::Real RealScalar;

 public:
  CompressedStorage() : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) {}

  explicit CompressedStorage(Index size) : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) { resize(size); }

  CompressedStorage(const CompressedStorage& other) : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0) {
    *this = other;
  }

  CompressedStorage& operator=(const CompressedStorage& other) {
    resize(other.size());
    if (other.size() > 0) {
      internal::smart_copy(other.m_values, other.m_values + m_size, m_values);
      internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
    }
    return *this;
  }

  void swap(CompressedStorage& other) {
    std::swap(m_values, other.m_values);
    std::swap(m_indices, other.m_indices);
    std::swap(m_size, other.m_size);
    std::swap(m_allocatedSize, other.m_allocatedSize);
  }

  ~CompressedStorage() {
    conditional_aligned_delete_auto<Scalar, true>(m_values, m_allocatedSize);
    conditional_aligned_delete_auto<StorageIndex, true>(m_indices, m_allocatedSize);
  }

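  // Makes room for `size` additional elements. Note that, unlike std::vector::reserve,
  // the argument is extra capacity on top of the current size, not a total capacity.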
  void reserve(Index size) {
    Index newAllocatedSize = m_size + size;
    if (newAllocatedSize > m_allocatedSize) reallocate(newAllocatedSize);
  }

  void squeeze() {
    if (m_allocatedSize > m_size) reallocate(m_size);
  }

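  // Grows or shrinks the logical size to `size`. When a reallocation is needed,
  // `reserveSizeFactor` adds slack capacity proportional to `size`; the new capacity
  // is clamped to the largest representable index, and throw_std_bad_alloc() is
  // called if even the clamped capacity cannot hold `size` elements.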
  void resize(Index size, double reserveSizeFactor = 0) {
    if (m_allocatedSize < size) {
      // Avoid underflow on the std::min<Index> call by choosing the smaller index type.
      using SmallerIndexType =
          typename std::conditional<static_cast<size_t>((std::numeric_limits<Index>::max)()) <
                                        static_cast<size_t>((std::numeric_limits<StorageIndex>::max)()),
                                    Index, StorageIndex>::type;
      Index realloc_size =
          (std::min<Index>)(NumTraits<SmallerIndexType>::highest(), size + Index(reserveSizeFactor * double(size)));
      if (realloc_size < size) internal::throw_std_bad_alloc();
      reallocate(realloc_size);
    }
    m_size = size;
  }

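  // Appends the pair (v, i) at the end. Passing reserveSizeFactor = 1 to resize()
  // doubles the capacity whenever a reallocation occurs. Callers are expected to
  // append indices in increasing order, since the binary-search based accessors
  // below assume the indices are sorted.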
  void append(const Scalar& v, Index i) {
    Index id = m_size;
    resize(m_size + 1, 1);
    m_values[id] = v;
    m_indices[id] = internal::convert_index<StorageIndex>(i);
  }

  inline Index size() const { return m_size; }
  inline Index allocatedSize() const { return m_allocatedSize; }
  inline void clear() { m_size = 0; }

  const Scalar* valuePtr() const { return m_values; }
  Scalar* valuePtr() { return m_values; }
  const StorageIndex* indexPtr() const { return m_indices; }
  StorageIndex* indexPtr() { return m_indices; }

  inline Scalar& value(Index i) {
    eigen_internal_assert(m_values != 0);
    return m_values[i];
  }
  inline const Scalar& value(Index i) const {
    eigen_internal_assert(m_values != 0);
    return m_values[i];
  }

  inline StorageIndex& index(Index i) {
    eigen_internal_assert(m_indices != 0);
    return m_indices[i];
  }
  inline const StorageIndex& index(Index i) const {
    eigen_internal_assert(m_indices != 0);
    return m_indices[i];
  }

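  // Returns the largest k such that index[j] < key for all j in [0,k), i.e. the
  // insertion position of `key` (binary search over the sorted indices).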
  inline Index searchLowerIndex(Index key) const { return searchLowerIndex(0, m_size, key); }

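  // Same as above, but the search is restricted to the index range [start,end).
  // The returned position is still relative to the beginning of the arrays.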
  inline Index searchLowerIndex(Index start, Index end, Index key) const {
    return static_cast<Index>(std::distance(m_indices, std::lower_bound(m_indices + start, m_indices + end, key)));
  }

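  // Returns the stored value at index `key`, or `defaultValue` if no such entry
  // exists; nothing is inserted.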
  inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const {
    if (m_size == 0)
      return defaultValue;
    else if (key == m_indices[m_size - 1])
      return m_values[m_size - 1];
    // ^^ optimization: let's first check if it is the last coefficient
    // (very common in high level algorithms)
    const Index id = searchLowerIndex(0, m_size - 1, key);
    return ((id < m_size) && (m_indices[id] == key)) ? m_values[id] : defaultValue;
  }

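  // Like at(), but the search is restricted to the range [start,end).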
  inline Scalar atInRange(Index start, Index end, Index key, const Scalar& defaultValue = Scalar(0)) const {
    if (start >= end)
      return defaultValue;
    else if (end > start && key == m_indices[end - 1])
      return m_values[end - 1];
    // ^^ optimization: let's first check if it is the last coefficient
    // (very common in high level algorithms)
    const Index id = searchLowerIndex(start, end - 1, key);
    return ((id < end) && (m_indices[id] == key)) ? m_values[id] : defaultValue;
  }

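  // Returns a reference to the value at index `key`. If no such entry exists,
  // `defaultValue` is inserted at the position that keeps the indices sorted
  // (doubling the capacity if a reallocation is required).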
  inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0)) {
    Index id = searchLowerIndex(0, m_size, key);
    if (id >= m_size || m_indices[id] != key) {
      if (m_allocatedSize < m_size + 1) {
        Index newAllocatedSize = 2 * (m_size + 1);
        m_values = conditional_aligned_realloc_new_auto<Scalar, true>(m_values, newAllocatedSize, m_allocatedSize);
        m_indices =
            conditional_aligned_realloc_new_auto<StorageIndex, true>(m_indices, newAllocatedSize, m_allocatedSize);
        m_allocatedSize = newAllocatedSize;
      }
      if (m_size > id) {
        internal::smart_memmove(m_values + id, m_values + m_size, m_values + id + 1);
        internal::smart_memmove(m_indices + id, m_indices + m_size, m_indices + id + 1);
      }
      m_size++;
      m_indices[id] = internal::convert_index<StorageIndex>(key);
      m_values[id] = defaultValue;
    }
    return m_values[id];
  }

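  // Moves `chunkSize` consecutive elements from position `from` to position `to`;
  // the source and destination ranges may overlap (memmove semantics).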
  inline void moveChunk(Index from, Index to, Index chunkSize) {
    eigen_internal_assert(chunkSize >= 0 && to + chunkSize <= m_size);
    internal::smart_memmove(m_values + from, m_values + from + chunkSize, m_values + to);
    internal::smart_memmove(m_indices + from, m_indices + from + chunkSize, m_indices + to);
  }

 protected:
  inline void reallocate(Index size) {
#ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
    EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
#endif
    eigen_internal_assert(size != m_allocatedSize);
    m_values = conditional_aligned_realloc_new_auto<Scalar, true>(m_values, size, m_allocatedSize);
    m_indices = conditional_aligned_realloc_new_auto<StorageIndex, true>(m_indices, size, m_allocatedSize);
    m_allocatedSize = size;
  }

 protected:
  Scalar* m_values;
  StorageIndex* m_indices;
  Index m_size;
  Index m_allocatedSize;
};

}  // end namespace internal

}  // end namespace Eigen

#endif  // EIGEN_COMPRESSED_STORAGE_H
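
For context, CompressedStorage is the low-level buffer behind SparseMatrix: two parallel
arrays holding the nonzero values and their sorted inner indices. The sketch below shows
how the accessors behave. It assumes an Eigen 3.4.x install whose headers are on the
include path; since this class lives in Eigen::internal it is not public API and may
change between releases, so treat this as illustration only.

#include <Eigen/SparseCore>
#include <iostream>

int main() {
  // Internal API: reachable because <Eigen/SparseCore> includes CompressedStorage.h.
  Eigen::internal::CompressedStorage<double, int> s;

  // Append (value, index) pairs with increasing indices so that the
  // binary-search based accessors remain valid.
  s.append(1.0, 0);
  s.append(2.0, 4);
  s.append(3.0, 7);

  std::cout << s.at(4) << "\n";  // prints 2: index 4 is present
  std::cout << s.at(5) << "\n";  // prints 0: absent, so defaultValue is returned

  // atWithInsertion() inserts a default-valued entry while keeping indices sorted.
  s.atWithInsertion(5) = 2.5;
  std::cout << s.size() << "\n";  // prints 4

  return 0;
}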