#ifndef EIGEN_MALLOC_ALREADY_ALIGNED

// On 64-bit glibc (>= 2.8), malloc already returns 16-byte-aligned pointers.
#if defined(__GLIBC__) && ((__GLIBC__ >= 2 && __GLIBC_MINOR__ >= 8) || __GLIBC__ > 2) && defined(__LP64__) && \
    !defined(__SANITIZE_ADDRESS__) && (EIGEN_DEFAULT_ALIGN_BYTES == 16)
#define EIGEN_GLIBC_MALLOC_ALREADY_ALIGNED 1
#else
#define EIGEN_GLIBC_MALLOC_ALREADY_ALIGNED 0
#endif

// FreeBSD's malloc returns 16-byte-aligned pointers on most architectures.
#if defined(__FreeBSD__) && !(EIGEN_ARCH_ARM || EIGEN_ARCH_MIPS) && (EIGEN_DEFAULT_ALIGN_BYTES == 16)
#define EIGEN_FREEBSD_MALLOC_ALREADY_ALIGNED 1
#else
#define EIGEN_FREEBSD_MALLOC_ALREADY_ALIGNED 0
#endif

#if (EIGEN_OS_MAC && (EIGEN_DEFAULT_ALIGN_BYTES == 16)) || (EIGEN_OS_WIN64 && (EIGEN_DEFAULT_ALIGN_BYTES == 16)) || \
    EIGEN_GLIBC_MALLOC_ALREADY_ALIGNED || EIGEN_FREEBSD_MALLOC_ALREADY_ALIGNED
#define EIGEN_MALLOC_ALREADY_ALIGNED 1
#else
#define EIGEN_MALLOC_ALREADY_ALIGNED 0
#endif

#endif  // EIGEN_MALLOC_ALREADY_ALIGNED
#ifndef EIGEN_MALLOC_CHECK_THREAD_LOCAL

// Use thread_local for the runtime malloc-allowed flag where supported
// (not available in GPU device code).
#ifndef EIGEN_AVOID_THREAD_LOCAL

#if ((EIGEN_COMP_GNUC) || __has_feature(cxx_thread_local) || EIGEN_COMP_MSVC >= 1900) && \
    !defined(EIGEN_GPU_COMPILE_PHASE)
#define EIGEN_MALLOC_CHECK_THREAD_LOCAL thread_local
#else
#define EIGEN_MALLOC_CHECK_THREAD_LOCAL
#endif

#else  // EIGEN_AVOID_THREAD_LOCAL
#define EIGEN_MALLOC_CHECK_THREAD_LOCAL
#endif

#endif  // EIGEN_MALLOC_CHECK_THREAD_LOCAL
#include "../InternalHeaderCheck.h"
namespace Eigen {
namespace internal {

#ifdef EIGEN_NO_MALLOC
// With EIGEN_NO_MALLOC defined, any attempt to heap-allocate is an assertion failure.
EIGEN_DEVICE_FUNC inline void check_that_malloc_is_allowed() {
  eigen_assert(false && "heap allocation is forbidden (EIGEN_NO_MALLOC is defined)");
}
#elif defined EIGEN_RUNTIME_NO_MALLOC
// With EIGEN_RUNTIME_NO_MALLOC, heap allocation can be enabled and disabled at runtime.
EIGEN_DEVICE_FUNC inline bool is_malloc_allowed_impl(bool update, bool new_value = false) {
  EIGEN_MALLOC_CHECK_THREAD_LOCAL static bool value = true;
  if (update) value = new_value;
  return value;
}
EIGEN_DEVICE_FUNC inline bool is_malloc_allowed() { return is_malloc_allowed_impl(false); }
EIGEN_DEVICE_FUNC inline bool set_is_malloc_allowed(bool new_value) { return is_malloc_allowed_impl(true, new_value); }
EIGEN_DEVICE_FUNC inline void check_that_malloc_is_allowed() {
  eigen_assert(is_malloc_allowed() &&
               "heap allocation is forbidden (EIGEN_RUNTIME_NO_MALLOC is defined and g_is_malloc_allowed is false)");
}
#else
EIGEN_DEVICE_FUNC inline void check_that_malloc_is_allowed() {}
#endif
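
// A minimal usage sketch (illustrative only, assuming the application defined
// EIGEN_RUNTIME_NO_MALLOC before including any Eigen header): temporarily
// forbid heap allocation to verify that a hot path does not allocate.
//
//   Eigen::internal::set_is_malloc_allowed(false);
//   y.noalias() = A * x;  // fails the assertion above if it heap-allocates
//   Eigen::internal::set_is_malloc_allowed(true);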
EIGEN_DEVICE_FUNC inline void throw_std_bad_alloc() {
#ifdef EIGEN_EXCEPTIONS
  throw std::bad_alloc();
#else
  std::size_t huge = static_cast<std::size_t>(-1);
#if defined(EIGEN_HIPCC)
  // hipcc does not allow calls to ::operator new in device code,
  // so this path cannot signal the error.
  EIGEN_UNUSED_VARIABLE(huge);
#else
  // Without exceptions, provoke an allocation failure instead.
  void* unused = ::operator new(huge);
  EIGEN_UNUSED_VARIABLE(unused);
#endif
#endif
}
// Like malloc, but guarantees the returned pointer is `alignment`-byte aligned.
// Over-allocates by `alignment` bytes and stores the applied offset in the byte
// just before the returned pointer, so handmade_aligned_free can recover the
// original allocation.
EIGEN_DEVICE_FUNC inline void* handmade_aligned_malloc(std::size_t size,
                                                       std::size_t alignment = EIGEN_DEFAULT_ALIGN_BYTES) {
  eigen_assert(alignment >= sizeof(void*) && alignment <= 128 && (alignment & (alignment - 1)) == 0 &&
               "Alignment must be at least sizeof(void*), less than or equal to 128, and a power of 2");

  check_that_malloc_is_allowed();
  EIGEN_USING_STD(malloc)
  void* original = malloc(size + alignment);
  if (original == nullptr) return nullptr;
  uint8_t offset = static_cast<uint8_t>(alignment - (reinterpret_cast<std::size_t>(original) & (alignment - 1)));
  void* aligned = static_cast<void*>(static_cast<uint8_t*>(original) + offset);
  *(static_cast<uint8_t*>(aligned) - 1) = offset;
  return aligned;
}
// Frees memory allocated with handmade_aligned_malloc by recovering the stored offset.
EIGEN_DEVICE_FUNC inline void handmade_aligned_free(void* ptr) {
  if (ptr != nullptr) {
    uint8_t offset = static_cast<uint8_t>(*(static_cast<uint8_t*>(ptr) - 1));
    void* original = static_cast<void*>(static_cast<uint8_t*>(ptr) - offset);

    check_that_malloc_is_allowed();
    EIGEN_USING_STD(free)
    free(original);
  }
}
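
// A minimal sketch of the bookkeeping above (illustrative only): for
// alignment = 16 and a malloc result ending in ...0x8, the stored offset is 8,
// the returned pointer is original + 8, and the free path reads the byte at
// aligned[-1] to step back to the original pointer.
//
//   void* p = handmade_aligned_malloc(100, 16);
//   assert(reinterpret_cast<std::uintptr_t>(p) % 16 == 0);
//   handmade_aligned_free(p);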
// Realloc counterpart of handmade_aligned_malloc. Since realloc may move the
// block, the alignment offset can change; in that case the payload is shifted
// into place with memmove before the new offset is stored.
EIGEN_DEVICE_FUNC inline void* handmade_aligned_realloc(void* ptr, std::size_t new_size, std::size_t old_size,
                                                        std::size_t alignment = EIGEN_DEFAULT_ALIGN_BYTES) {
  if (ptr == nullptr) return handmade_aligned_malloc(new_size, alignment);
  uint8_t old_offset = *(static_cast<uint8_t*>(ptr) - 1);
  void* old_original = static_cast<uint8_t*>(ptr) - old_offset;

  check_that_malloc_is_allowed();
  EIGEN_USING_STD(realloc)
  void* original = realloc(old_original, new_size + alignment);
  if (original == nullptr) return nullptr;
  if (original == old_original) return ptr;
  uint8_t offset = static_cast<uint8_t>(alignment - (reinterpret_cast<std::size_t>(original) & (alignment - 1)));
  void* aligned = static_cast<void*>(static_cast<uint8_t*>(original) + offset);
  if (offset != old_offset) {
    const void* src = static_cast<const void*>(static_cast<uint8_t*>(original) + old_offset);
    std::size_t count = (std::min)(new_size, old_size);
    std::memmove(aligned, src, count);
  }
  *(static_cast<uint8_t*>(aligned) - 1) = offset;
  return aligned;
}
// Allocates `size` bytes aligned to EIGEN_DEFAULT_ALIGN_BYTES, throwing
// std::bad_alloc on failure. Uses the system malloc when it already provides
// sufficient alignment, and the handmade allocator otherwise.
EIGEN_DEVICE_FUNC inline void* aligned_malloc(std::size_t size) {
  if (size == 0) return nullptr;

  void* result;
#if (EIGEN_DEFAULT_ALIGN_BYTES == 0) || EIGEN_MALLOC_ALREADY_ALIGNED

  check_that_malloc_is_allowed();
  EIGEN_USING_STD(malloc)
  result = malloc(size);

#if EIGEN_DEFAULT_ALIGN_BYTES == 16
  eigen_assert((size < 16 || (std::size_t(result) % 16) == 0) &&
               "System's malloc returned an unaligned pointer. Compile with EIGEN_MALLOC_ALREADY_ALIGNED=0 to fallback "
               "to handmade aligned memory allocator.");
#endif
#else
  result = handmade_aligned_malloc(size);
#endif

  if (!result && size) throw_std_bad_alloc();

  return result;
}
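
// Typical usage (illustrative only): aligned_malloc/aligned_free is the raw,
// untyped layer; the typed helpers aligned_new/aligned_delete below build on it.
//
//   float* buf = static_cast<float*>(Eigen::internal::aligned_malloc(64 * sizeof(float)));
//   // ... use buf ...
//   Eigen::internal::aligned_free(buf);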
// Frees memory allocated with aligned_malloc.
EIGEN_DEVICE_FUNC inline void aligned_free(void* ptr) {
#if (EIGEN_DEFAULT_ALIGN_BYTES == 0) || EIGEN_MALLOC_ALREADY_ALIGNED

  if (ptr != nullptr) {
    check_that_malloc_is_allowed();
    EIGEN_USING_STD(free)
    free(ptr);
  }

#else
  handmade_aligned_free(ptr);
#endif
}
// Realloc counterpart of aligned_malloc; throws std::bad_alloc on failure.
EIGEN_DEVICE_FUNC inline void* aligned_realloc(void* ptr, std::size_t new_size, std::size_t old_size) {
  if (ptr == nullptr) return aligned_malloc(new_size);
  if (old_size == new_size) return ptr;
  if (new_size == 0) {
    aligned_free(ptr);
    return nullptr;
  }

  void* result;
#if (EIGEN_DEFAULT_ALIGN_BYTES == 0) || EIGEN_MALLOC_ALREADY_ALIGNED
  EIGEN_UNUSED_VARIABLE(old_size)

  check_that_malloc_is_allowed();
  EIGEN_USING_STD(realloc)
  result = realloc(ptr, new_size);
#else
  result = handmade_aligned_realloc(ptr, new_size, old_size);
#endif

  if (!result && new_size) throw_std_bad_alloc();

  return result;
}
// Allocates `size` bytes; aligned to EIGEN_DEFAULT_ALIGN_BYTES only if Align is true.
template <bool Align>
EIGEN_DEVICE_FUNC inline void* conditional_aligned_malloc(std::size_t size) {
  return aligned_malloc(size);
}

template <>
EIGEN_DEVICE_FUNC inline void* conditional_aligned_malloc<false>(std::size_t size) {
  if (size == 0) return nullptr;

  check_that_malloc_is_allowed();
  EIGEN_USING_STD(malloc)
  void* result = malloc(size);

  if (!result && size) throw_std_bad_alloc();
  return result;
}
template <bool Align>
EIGEN_DEVICE_FUNC inline void conditional_aligned_free(void* ptr) {
  aligned_free(ptr);
}

template <>
EIGEN_DEVICE_FUNC inline void conditional_aligned_free<false>(void* ptr) {
  if (ptr != nullptr) {
    check_that_malloc_is_allowed();
    EIGEN_USING_STD(free)
    free(ptr);
  }
}
template <bool Align>
EIGEN_DEVICE_FUNC inline void* conditional_aligned_realloc(void* ptr, std::size_t new_size, std::size_t old_size) {
  return aligned_realloc(ptr, new_size, old_size);
}

template <>
EIGEN_DEVICE_FUNC inline void* conditional_aligned_realloc<false>(void* ptr, std::size_t new_size,
                                                                  std::size_t old_size) {
  if (ptr == nullptr) return conditional_aligned_malloc<false>(new_size);
  if (old_size == new_size) return ptr;
  if (new_size == 0) {
    conditional_aligned_free<false>(ptr);
    return nullptr;
  }

  check_that_malloc_is_allowed();
  EIGEN_USING_STD(realloc)
  return realloc(ptr, new_size);
}
// Destructs the elements of an array, in reverse order of construction.
template <typename T>
EIGEN_DEVICE_FUNC inline void destruct_elements_of_array(T* ptr, std::size_t size) {
  while (size) ptr[--size].~T();
}
// Default-constructs the elements of an array. If a constructor throws, the
// already-constructed elements are destructed before the exception is rethrown.
template <typename T>
EIGEN_DEVICE_FUNC inline T* default_construct_elements_of_array(T* ptr, std::size_t size) {
  std::size_t i = 0;
  EIGEN_TRY {
    for (i = 0; i < size; ++i) ::new (ptr + i) T;
    return ptr;
  }
  EIGEN_CATCH(...) {
    destruct_elements_of_array(ptr, i);
    EIGEN_THROW;
  }
  return ptr;
}
// Copy-constructs the elements of an array from src, rolling back on exception.
template <typename T>
EIGEN_DEVICE_FUNC inline T* copy_construct_elements_of_array(T* ptr, const T* src, std::size_t size) {
  std::size_t i = 0;
  EIGEN_TRY {
    for (i = 0; i < size; ++i) ::new (ptr + i) T(*(src + i));
    return ptr;
  }
  EIGEN_CATCH(...) {
    destruct_elements_of_array(ptr, i);
    EIGEN_THROW;
  }
  return ptr;
}
// Move-constructs the elements of an array from src, rolling back on exception.
template <typename T>
EIGEN_DEVICE_FUNC inline T* move_construct_elements_of_array(T* ptr, T* src, std::size_t size) {
  std::size_t i = 0;
  EIGEN_TRY {
    for (i = 0; i < size; ++i) ::new (ptr + i) T(std::move(*(src + i)));
    return ptr;
  }
  EIGEN_CATCH(...) {
    destruct_elements_of_array(ptr, i);
    EIGEN_THROW;
  }
  return ptr;
}
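
// The three helpers above share one exception-safety pattern: track how many
// elements were constructed and roll back on failure. A minimal sketch of the
// idea (illustrative only; Widget is a hypothetical type, not part of Eigen):
//
//   std::size_t i = 0;
//   try {
//     for (i = 0; i < n; ++i) new (buf + i) Widget();  // may throw
//   } catch (...) {
//     while (i) buf[--i].~Widget();                    // undo partial work
//     throw;
//   }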
// Rejects element counts for which sizeof(T) * size would overflow std::size_t.
template <typename T>
EIGEN_DEVICE_FUNC EIGEN_ALWAYS_INLINE void check_size_for_overflow(std::size_t size) {
  if (size > std::size_t(-1) / sizeof(T)) throw_std_bad_alloc();
}
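
// Worked example (illustrative): on a 64-bit platform with sizeof(double) == 8,
// std::size_t(-1) / 8 == 0x1FFFFFFFFFFFFFFF; any requested count above that
// would make sizeof(double) * size wrap around, so it is rejected up front.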
// Allocates and default-constructs `size` objects of type T, aligned to
// EIGEN_DEFAULT_ALIGN_BYTES. Throws std::bad_alloc on allocation failure.
template <typename T>
EIGEN_DEVICE_FUNC inline T* aligned_new(std::size_t size) {
  check_size_for_overflow<T>(size);
  T* result = static_cast<T*>(aligned_malloc(sizeof(T) * size));
  EIGEN_TRY { return default_construct_elements_of_array(result, size); }
  EIGEN_CATCH(...) {
    aligned_free(result);
    EIGEN_THROW;
  }
  return result;
}
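
// Usage sketch (illustrative only): aligned_new pairs with aligned_delete,
// which needs the element count in order to run the destructors.
//
//   float* v = Eigen::internal::aligned_new<float>(32);
//   // ... use v[0..31] ...
//   Eigen::internal::aligned_delete(v, 32);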
template <typename T, bool Align>
EIGEN_DEVICE_FUNC inline T* conditional_aligned_new(std::size_t size) {
  check_size_for_overflow<T>(size);
  T* result = static_cast<T*>(conditional_aligned_malloc<Align>(sizeof(T) * size));
  EIGEN_TRY { return default_construct_elements_of_array(result, size); }
  EIGEN_CATCH(...) {
    conditional_aligned_free<Align>(result);
    EIGEN_THROW;
  }
  return result;
}
// Destructs and frees an array created with aligned_new.
template <typename T>
EIGEN_DEVICE_FUNC inline void aligned_delete(T* ptr, std::size_t size) {
  destruct_elements_of_array<T>(ptr, size);
  aligned_free(ptr);
}
// Destructs and frees an array created with conditional_aligned_new.
template <typename T, bool Align>
EIGEN_DEVICE_FUNC inline void conditional_aligned_delete(T* ptr, std::size_t size) {
  destruct_elements_of_array<T>(ptr, size);
  conditional_aligned_free<Align>(ptr);
}
// Grows or shrinks an array by allocating a new block, move-constructing the
// surviving elements, default-constructing any new tail, and deleting the old array.
template <typename T, bool Align>
EIGEN_DEVICE_FUNC inline T* conditional_aligned_realloc_new(T* pts, std::size_t new_size, std::size_t old_size) {
  check_size_for_overflow<T>(new_size);
  check_size_for_overflow<T>(old_size);

  T* result = static_cast<T*>(conditional_aligned_malloc<Align>(sizeof(T) * new_size));
  EIGEN_TRY {
    // Move-construct the elements that survive the resize.
    std::size_t copy_size = (std::min)(old_size, new_size);
    move_construct_elements_of_array(result, pts, copy_size);

    // Default-construct the newly added elements, if any.
    if (new_size > old_size) {
      default_construct_elements_of_array(result + copy_size, new_size - old_size);
    }

    // Destruct and free the old array.
    conditional_aligned_delete<T, Align>(pts, old_size);
  }
  EIGEN_CATCH(...) {
    conditional_aligned_free<Align>(result);
    EIGEN_THROW;
  }
  return result;
}
// Like conditional_aligned_new, but skips construction and destruction entirely
// for types that do not require initialization.
template <typename T, bool Align>
EIGEN_DEVICE_FUNC inline T* conditional_aligned_new_auto(std::size_t size) {
  if (size == 0) return nullptr;
  check_size_for_overflow<T>(size);
  T* result = static_cast<T*>(conditional_aligned_malloc<Align>(sizeof(T) * size));
  if (NumTraits<T>::RequireInitialization) {
    EIGEN_TRY { default_construct_elements_of_array(result, size); }
    EIGEN_CATCH(...) {
      conditional_aligned_free<Align>(result);
      EIGEN_THROW;
    }
  }
  return result;
}
template <typename T, bool Align>
EIGEN_DEVICE_FUNC inline T* conditional_aligned_realloc_new_auto(T* pts, std::size_t new_size, std::size_t old_size) {
  // Types that require initialization cannot be moved with a raw realloc;
  // fall back to the element-wise path.
  if (NumTraits<T>::RequireInitialization) {
    return conditional_aligned_realloc_new<T, Align>(pts, new_size, old_size);
  }

  check_size_for_overflow<T>(new_size);
  check_size_for_overflow<T>(old_size);
  return static_cast<T*>(
      conditional_aligned_realloc<Align>(static_cast<void*>(pts), sizeof(T) * new_size, sizeof(T) * old_size));
}
template <typename T, bool Align>
EIGEN_DEVICE_FUNC inline void conditional_aligned_delete_auto(T* ptr, std::size_t size) {
  if (NumTraits<T>::RequireInitialization) destruct_elements_of_array<T>(ptr, size);
  conditional_aligned_free<Align>(ptr);
}
// Returns the index of the first element in `array` that is aligned on an
// Alignment-byte boundary, or `size` if no element within range can be aligned.
template <int Alignment, typename Scalar, typename Index>
EIGEN_DEVICE_FUNC inline Index first_aligned(const Scalar* array, Index size) {
  const Index ScalarSize = sizeof(Scalar);
  const Index AlignmentSize = Alignment / ScalarSize;
  const Index AlignmentMask = AlignmentSize - 1;

  if (AlignmentSize <= 1) {
    // The requested alignment is not larger than a scalar: every element is aligned.
    return 0;
  } else if ((std::uintptr_t(array) & (sizeof(Scalar) - 1)) || (Alignment % ScalarSize) != 0) {
    // The array is not even scalar-aligned, or the requested alignment is not a
    // multiple of the scalar size: no element of the array can be aligned.
    return size;
  } else {
    Index first = (AlignmentSize - (Index((std::uintptr_t(array) / sizeof(Scalar))) & AlignmentMask)) & AlignmentMask;
    return (first < size) ? first : size;
  }
}
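
// Worked example (illustrative): for float (4 bytes), Alignment = 16, and an
// array starting at address 0x1004, AlignmentSize = 4, the scalar index of the
// start address is 0x1004 / 4 = 0x401, and first = (4 - (0x401 & 3)) & 3 = 3:
// the element at array + 3 sits at 0x1010, the first 16-byte boundary.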
// Same as first_aligned, using the default alignment of Scalar's packet type.
template <typename Scalar, typename Index>
EIGEN_DEVICE_FUNC inline Index first_default_aligned(const Scalar* array, Index size) {
  typedef typename packet_traits<Scalar>::type DefaultPacketType;
  return first_aligned<unpacket_traits<DefaultPacketType>::alignment>(array, size);
}
// Returns the smallest multiple of `base` that is not smaller than `size`.
template <typename Index>
inline Index first_multiple(Index size, Index base) {
  return ((size + base - 1) / base) * base;
}
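
// For instance (illustrative): first_multiple(13, 8) == ((13 + 7) / 8) * 8 == 16.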
// smart_copy: copies [start, end) to target with memcpy when T does not require
// initialization, and element-wise std::copy otherwise.
template <typename T, bool UseMemcpy>
struct smart_copy_helper;

template <typename T>
EIGEN_DEVICE_FUNC void smart_copy(const T* start, const T* end, T* target) {
  smart_copy_helper<T, !NumTraits<T>::RequireInitialization>::run(start, end, target);
}
template <typename T>
struct smart_copy_helper<T, true> {
  EIGEN_DEVICE_FUNC static inline void run(const T* start, const T* end, T* target) {
    std::intptr_t size = std::intptr_t(end) - std::intptr_t(start);
    if (size == 0) return;
    eigen_internal_assert(start != 0 && end != 0 && target != 0);
    EIGEN_USING_STD(memcpy)
    memcpy(target, start, size);
  }
};
template <typename T>
struct smart_copy_helper<T, false> {
  EIGEN_DEVICE_FUNC static inline void run(const T* start, const T* end, T* target) { std::copy(start, end, target); }
};
// smart_memmove: like smart_copy, but supports overlapping source and target ranges.
template <typename T, bool UseMemmove>
struct smart_memmove_helper;

template <typename T>
void smart_memmove(const T* start, const T* end, T* target) {
  smart_memmove_helper<T, !NumTraits<T>::RequireInitialization>::run(start, end, target);
}
template <typename T>
struct smart_memmove_helper<T, true> {
  static inline void run(const T* start, const T* end, T* target) {
    std::intptr_t size = std::intptr_t(end) - std::intptr_t(start);
    if (size == 0) return;
    eigen_internal_assert(start != 0 && end != 0 && target != 0);
    std::memmove(target, start, size);
  }
};
template <typename T>
struct smart_memmove_helper<T, false> {
  static inline void run(const T* start, const T* end, T* target) {
    if (std::uintptr_t(target) < std::uintptr_t(start)) {
      std::copy(start, end, target);
    } else {
      std::ptrdiff_t count = (std::ptrdiff_t(end) - std::ptrdiff_t(start)) / sizeof(T);
      std::copy_backward(start, end, target + count);
    }
  }
};
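
// The copy direction matters for overlapping ranges (illustrative): copying
// [a, a+4) to a+2 forwards would overwrite a[2] and a[3] before they are read;
// copy_backward writes the last element first, so the overlap is safe.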
template <typename T>
EIGEN_DEVICE_FUNC T* smart_move(T* start, T* end, T* target) {
  return std::move(start, end, target);
}
// Select the platform's alloca (stack allocation) primitive, if any.
#if !defined EIGEN_ALLOCA && !defined EIGEN_GPU_COMPILE_PHASE
#if EIGEN_OS_LINUX || EIGEN_OS_MAC || (defined alloca)
#define EIGEN_ALLOCA alloca
#elif EIGEN_COMP_MSVC
#define EIGEN_ALLOCA _alloca
#endif
#endif

// With clang in Thumb mode, alloca changes the stack pointer in a way that is
// not obvious to the compiler, leading to faulty code; avoid it there.
#if defined(__clang__) && defined(__thumb__)
#undef EIGEN_ALLOCA
#endif
// Constructs the elements of an externally provided buffer (typically obtained
// with EIGEN_ALIGNED_ALLOCA or aligned_malloc), destructs them at scope exit,
// and frees the buffer if dealloc is true.
template <typename T>
class aligned_stack_memory_handler : noncopyable {
 public:
  EIGEN_DEVICE_FUNC aligned_stack_memory_handler(T* ptr, std::size_t size, bool dealloc)
      : m_ptr(ptr), m_size(size), m_deallocate(dealloc) {
    if (NumTraits<T>::RequireInitialization && m_ptr) Eigen::internal::default_construct_elements_of_array(m_ptr, size);
  }
  EIGEN_DEVICE_FUNC ~aligned_stack_memory_handler() {
    if (NumTraits<T>::RequireInitialization && m_ptr) Eigen::internal::destruct_elements_of_array<T>(m_ptr, m_size);
    if (m_deallocate) Eigen::internal::aligned_free(m_ptr);
  }

 protected:
  T* m_ptr;
  std::size_t m_size;
  bool m_deallocate;
};
// Evaluates a nested expression either in place or into an external buffer,
// depending on whether nested_eval decides an evaluation is needed and the
// expression's size is dynamic.
template <typename Xpr, int NbEvaluations,
          bool MapExternalBuffer = nested_eval<Xpr, NbEvaluations>::Evaluate && Xpr::MaxSizeAtCompileTime == Dynamic>
struct local_nested_eval_wrapper {
  static constexpr bool NeedExternalBuffer = false;
  typedef typename Xpr::Scalar Scalar;
  typedef typename nested_eval<Xpr, NbEvaluations>::type ObjectType;
  ObjectType object;

  EIGEN_DEVICE_FUNC local_nested_eval_wrapper(const Xpr& xpr, Scalar* ptr) : object(xpr) {
    EIGEN_UNUSED_VARIABLE(ptr);
    eigen_internal_assert(ptr == 0);
  }
};
template <typename Xpr, int NbEvaluations>
struct local_nested_eval_wrapper<Xpr, NbEvaluations, true> {
  static constexpr bool NeedExternalBuffer = true;
  typedef typename Xpr::Scalar Scalar;
  typedef typename plain_object_eval<Xpr>::type PlainObject;
  typedef Map<PlainObject, EIGEN_DEFAULT_ALIGN_BYTES> ObjectType;
  ObjectType object;

  EIGEN_DEVICE_FUNC local_nested_eval_wrapper(const Xpr& xpr, Scalar* ptr)
      : object(ptr == 0 ? reinterpret_cast<Scalar*>(Eigen::internal::aligned_malloc(sizeof(Scalar) * xpr.size())) : ptr,
               xpr.rows(), xpr.cols()),
        m_deallocate(ptr == 0) {
    if (NumTraits<Scalar>::RequireInitialization && object.data())
      Eigen::internal::default_construct_elements_of_array(object.data(), object.size());
    object = xpr;
  }

  EIGEN_DEVICE_FUNC ~local_nested_eval_wrapper() {
    if (NumTraits<Scalar>::RequireInitialization && object.data())
      Eigen::internal::destruct_elements_of_array(object.data(), object.size());
    if (m_deallocate) Eigen::internal::aligned_free(object.data());
  }

 private:
  bool m_deallocate;
};
template <typename T>
class scoped_array : noncopyable {
  T* m_ptr;

 public:
  explicit scoped_array(std::ptrdiff_t size) { m_ptr = new T[size]; }
  ~scoped_array() { delete[] m_ptr; }
  T& operator[](std::ptrdiff_t i) { return m_ptr[i]; }
  const T& operator[](std::ptrdiff_t i) const { return m_ptr[i]; }
  T*& ptr() { return m_ptr; }
  const T* ptr() const { return m_ptr; }
  operator const T*() const { return m_ptr; }
};
template <typename T>
void swap(scoped_array<T>& a, scoped_array<T>& b) {
  std::swap(a.ptr(), b.ptr());
}
#ifdef EIGEN_ALLOCA

#if EIGEN_DEFAULT_ALIGN_BYTES > 0
// We always manually re-align the result of EIGEN_ALLOCA. If alloca is already
// aligned, the compiler should be smart enough to optimize away the re-alignment.

#if (EIGEN_COMP_GNUC || EIGEN_COMP_CLANG)
#define EIGEN_ALIGNED_ALLOCA(SIZE) __builtin_alloca_with_align(SIZE, CHAR_BIT* EIGEN_DEFAULT_ALIGN_BYTES)
#else
// Rounds a raw stack pointer up to the next EIGEN_DEFAULT_ALIGN_BYTES boundary.
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void* eigen_aligned_alloca_helper(void* ptr) {
  constexpr std::uintptr_t mask = EIGEN_DEFAULT_ALIGN_BYTES - 1;
  std::uintptr_t ptr_int = std::uintptr_t(ptr);
  std::uintptr_t aligned_ptr_int = (ptr_int + mask) & ~mask;
  std::uintptr_t offset = aligned_ptr_int - ptr_int;
  return static_cast<void*>(static_cast<uint8_t*>(ptr) + offset);
}

#define EIGEN_ALIGNED_ALLOCA(SIZE) eigen_aligned_alloca_helper(EIGEN_ALLOCA(SIZE + EIGEN_DEFAULT_ALIGN_BYTES - 1))
#endif

#else
#define EIGEN_ALIGNED_ALLOCA(SIZE) EIGEN_ALLOCA(SIZE)
#endif
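
// Worked example of the rounding above (illustrative): with
// EIGEN_DEFAULT_ALIGN_BYTES == 16, mask == 0xF; a raw alloca result of
// 0x...7FF3 becomes (0x...7FF3 + 0xF) & ~0xF == 0x...8000, an offset of
// 13 bytes, which is why EIGEN_ALLOCA is asked for SIZE + 15 extra bytes.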
// Declares a TYPE* variable NAME pointing to SIZE elements: on the stack via
// EIGEN_ALIGNED_ALLOCA when small enough, otherwise on the heap, with a scoped
// handler that constructs/destructs the elements and frees heap memory.
#define ei_declare_aligned_stack_constructed_variable(TYPE, NAME, SIZE, BUFFER)                              \
  Eigen::internal::check_size_for_overflow<TYPE>(SIZE);                                                      \
  TYPE* NAME = (BUFFER) != 0 ? (BUFFER)                                                                      \
                             : reinterpret_cast<TYPE*>((sizeof(TYPE) * SIZE <= EIGEN_STACK_ALLOCATION_LIMIT) \
                                                           ? EIGEN_ALIGNED_ALLOCA(sizeof(TYPE) * SIZE)       \
                                                           : Eigen::internal::aligned_malloc(sizeof(TYPE) * SIZE)); \
  Eigen::internal::aligned_stack_memory_handler<TYPE> EIGEN_CAT(NAME, _stack_memory_destructor)(             \
      (BUFFER) == 0 ? NAME : 0, SIZE, sizeof(TYPE) * SIZE > EIGEN_STACK_ALLOCATION_LIMIT)
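
// Usage sketch (illustrative only): inside Eigen's own algorithms this expands
// to a stack- or heap-backed temporary with automatic construction and cleanup.
//
//   ei_declare_aligned_stack_constructed_variable(float, tmp, n, 0);
//   // tmp points to n constructed floats; destructed (and freed if on the
//   // heap) when the enclosing scope exits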
#define ei_declare_local_nested_eval(XPR_T, XPR, N, NAME)                                                \
  Eigen::internal::local_nested_eval_wrapper<XPR_T, N> EIGEN_CAT(NAME, _wrapper)(                        \
      XPR, reinterpret_cast<typename XPR_T::Scalar*>(                                                    \
               ((Eigen::internal::local_nested_eval_wrapper<XPR_T, N>::NeedExternalBuffer) &&            \
                ((sizeof(typename XPR_T::Scalar) * XPR.size()) <= EIGEN_STACK_ALLOCATION_LIMIT))         \
                   ? EIGEN_ALIGNED_ALLOCA(sizeof(typename XPR_T::Scalar) * XPR.size())                   \
                   : reinterpret_cast<void*>(0)));                                                       \
  typename Eigen::internal::local_nested_eval_wrapper<XPR_T, N>::ObjectType NAME(EIGEN_CAT(NAME, _wrapper).object)
#else  // EIGEN_ALLOCA is not defined: always allocate on the heap.

#define ei_declare_aligned_stack_constructed_variable(TYPE, NAME, SIZE, BUFFER)                                         \
  Eigen::internal::check_size_for_overflow<TYPE>(SIZE);                                                                \
  TYPE* NAME = (BUFFER) != 0 ? BUFFER : reinterpret_cast<TYPE*>(Eigen::internal::aligned_malloc(sizeof(TYPE) * SIZE)); \
  Eigen::internal::aligned_stack_memory_handler<TYPE> EIGEN_CAT(NAME, _stack_memory_destructor)(                       \
      (BUFFER) == 0 ? NAME : 0, SIZE, true)

#define ei_declare_local_nested_eval(XPR_T, XPR, N, NAME) \
  typename Eigen::internal::nested_eval<XPR_T, N>::type NAME(XPR)

#endif
// With C++17 over-aligned allocation, operator new itself honors alignment
// requirements, so the EIGEN_MAKE_ALIGNED_OPERATOR_NEW machinery reduces to no-ops.
#if EIGEN_HAS_CXX17_OVERALIGN

#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW_NOTHROW(NeedsToAlign)
#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW_IF(NeedsToAlign)
#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW
#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW_IF_VECTORIZABLE_FIXED_SIZE(Scalar, Size)

#else
// Skipped under HIP device compilation, where these operators are not supported.
#if EIGEN_MAX_ALIGN_BYTES != 0 && !defined(EIGEN_HIP_DEVICE_COMPILE)

#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW_NOTHROW(NeedsToAlign)                                    \
  EIGEN_DEVICE_FUNC void* operator new(std::size_t size, const std::nothrow_t&) EIGEN_NO_THROW { \
    EIGEN_TRY { return Eigen::internal::conditional_aligned_malloc<NeedsToAlign>(size); }        \
    EIGEN_CATCH(...) { return 0; }                                                               \
  }
#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW_IF(NeedsToAlign)                                                         \
  EIGEN_DEVICE_FUNC void* operator new(std::size_t size) {                                                       \
    return Eigen::internal::conditional_aligned_malloc<NeedsToAlign>(size);                                      \
  }                                                                                                              \
  EIGEN_DEVICE_FUNC void* operator new[](std::size_t size) {                                                     \
    return Eigen::internal::conditional_aligned_malloc<NeedsToAlign>(size);                                      \
  }                                                                                                              \
  EIGEN_DEVICE_FUNC void operator delete(void* ptr) EIGEN_NO_THROW {                                             \
    Eigen::internal::conditional_aligned_free<NeedsToAlign>(ptr);                                                \
  }                                                                                                              \
  EIGEN_DEVICE_FUNC void operator delete[](void* ptr) EIGEN_NO_THROW {                                           \
    Eigen::internal::conditional_aligned_free<NeedsToAlign>(ptr);                                                \
  }                                                                                                              \
  EIGEN_DEVICE_FUNC void operator delete(void* ptr, std::size_t /*size*/) EIGEN_NO_THROW {                       \
    Eigen::internal::conditional_aligned_free<NeedsToAlign>(ptr);                                                \
  }                                                                                                              \
  EIGEN_DEVICE_FUNC void operator delete[](void* ptr, std::size_t /*size*/) EIGEN_NO_THROW {                     \
    Eigen::internal::conditional_aligned_free<NeedsToAlign>(ptr);                                                \
  }                                                                                                              \
  /* in-place new and delete: no memory is actually allocated, so the */                                         \
  /* default implementations can safely handle this case.             */                                         \
  EIGEN_DEVICE_FUNC static void* operator new(std::size_t size, void* ptr) { return ::operator new(size, ptr); } \
  EIGEN_DEVICE_FUNC static void* operator new[](std::size_t size, void* ptr) {                                   \
    return ::operator new[](size, ptr);                                                                          \
  }                                                                                                              \
  EIGEN_DEVICE_FUNC void operator delete(void* memory, void* ptr) EIGEN_NO_THROW {                               \
    return ::operator delete(memory, ptr);                                                                       \
  }                                                                                                              \
  EIGEN_DEVICE_FUNC void operator delete[](void* memory, void* ptr) EIGEN_NO_THROW {                             \
    return ::operator delete[](memory, ptr);                                                                     \
  }                                                                                                              \
  /* nothrow-new (returns zero instead of throwing std::bad_alloc) */                                            \
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW_NOTHROW(NeedsToAlign)                                                          \
  EIGEN_DEVICE_FUNC void operator delete(void* ptr, const std::nothrow_t&) EIGEN_NO_THROW {                      \
    Eigen::internal::conditional_aligned_free<NeedsToAlign>(ptr);                                                \
  }                                                                                                              \
  typedef void eigen_aligned_operator_new_marker_type;
#else
#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW_IF(NeedsToAlign)
#endif
#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW EIGEN_MAKE_ALIGNED_OPERATOR_NEW_IF(true)
#define EIGEN_MAKE_ALIGNED_OPERATOR_NEW_IF_VECTORIZABLE_FIXED_SIZE(Scalar, Size)                                 \
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW_IF(                                                                            \
      bool(((Size) != Eigen::Dynamic) &&                                                                         \
           (((EIGEN_MAX_ALIGN_BYTES >= 16) && ((sizeof(Scalar) * (Size)) % (EIGEN_MAX_ALIGN_BYTES) == 0)) ||     \
            ((EIGEN_MAX_ALIGN_BYTES >= 32) && ((sizeof(Scalar) * (Size)) % (EIGEN_MAX_ALIGN_BYTES / 2) == 0)) || \
            ((EIGEN_MAX_ALIGN_BYTES >= 64) && ((sizeof(Scalar) * (Size)) % (EIGEN_MAX_ALIGN_BYTES / 4) == 0)))))

#endif  // EIGEN_HAS_CXX17_OVERALIGN
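
// Usage sketch (illustrative only): a class holding a fixed-size vectorizable
// Eigen member opts into aligned dynamic allocation with the macro above.
//
//   struct MyState {
//     Eigen::Vector4f v;  // 16 bytes, vectorizable
//     EIGEN_MAKE_ALIGNED_OPERATOR_NEW
//   };
//   MyState* s = new MyState;  // suitably aligned even before C++17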
}  // end namespace internal

/** \class aligned_allocator
 * \ingroup Core_Module
 *
 * \brief STL compatible allocator to use with types requiring a non-standard alignment.
 */
template <class T>
class aligned_allocator : public std::allocator<T> {
 public:
  typedef std::size_t size_type;
  typedef std::ptrdiff_t difference_type;
  typedef T* pointer;
  typedef const T* const_pointer;
  typedef T& reference;
  typedef const T& const_reference;
  typedef T value_type;

  template <class U>
  struct rebind {
    typedef aligned_allocator<U> other;
  };

  aligned_allocator() : std::allocator<T>() {}
  aligned_allocator(const aligned_allocator& other) : std::allocator<T>(other) {}
  template <class U>
  aligned_allocator(const aligned_allocator<U>& other) : std::allocator<T>(other) {}
  ~aligned_allocator() {}

#if EIGEN_COMP_GNUC_STRICT && EIGEN_GNUC_STRICT_AT_LEAST(7, 0, 0)
  // gcc's std::allocator::max_size() is buggy and triggers bogus warnings,
  // see https://gcc.gnu.org/bugzilla/show_bug.cgi?id=87544 ; provide a safe one.
  size_type max_size() const { return (std::numeric_limits<std::ptrdiff_t>::max)() / sizeof(T); }
#endif

  pointer allocate(size_type num, const void* = 0) {
    internal::check_size_for_overflow<T>(num);
    return static_cast<pointer>(internal::aligned_malloc(num * sizeof(T)));
  }

  void deallocate(pointer p, size_type /*num*/) { internal::aligned_free(p); }
};
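
// Usage sketch (illustrative only): keep 16/32-byte alignment for fixed-size
// vectorizable types stored in standard containers (needed before C++17).
//
//   std::vector<Eigen::Vector4f, Eigen::aligned_allocator<Eigen::Vector4f>> pts;
//   pts.push_back(Eigen::Vector4f::Zero());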
// Cache-size queries via the x86 cpuid instruction.
#if !defined(EIGEN_NO_CPUID)
#if EIGEN_COMP_GNUC && EIGEN_ARCH_i386_OR_x86_64
#if defined(__PIC__) && EIGEN_ARCH_i386
// Case for x86 with PIC: EBX holds the GOT pointer, so it must be saved and restored around cpuid.
#define EIGEN_CPUID(abcd, func, id)                                                  \
  __asm__ __volatile__("xchgl %%ebx, %k1;cpuid; xchgl %%ebx,%k1"                     \
                       : "=a"(abcd[0]), "=&r"(abcd[1]), "=c"(abcd[2]), "=d"(abcd[3]) \
                       : "a"(func), "c"(id));
#elif defined(__PIC__) && EIGEN_ARCH_x86_64
// Case for x86_64 with PIC: RBX may be reserved as well, so preserve it too.
#define EIGEN_CPUID(abcd, func, id)                                                  \
  __asm__ __volatile__("xchg{q}\t{%%}rbx, %q1; cpuid; xchg{q}\t{%%}rbx, %q1"         \
                       : "=a"(abcd[0]), "=&r"(abcd[1]), "=c"(abcd[2]), "=d"(abcd[3]) \
                       : "0"(func), "2"(id));
#else
// Case for x86 / x86_64 without PIC.
#define EIGEN_CPUID(abcd, func, id) \
  __asm__ __volatile__("cpuid" : "=a"(abcd[0]), "=b"(abcd[1]), "=c"(abcd[2]), "=d"(abcd[3]) : "0"(func), "2"(id));
#endif
#elif EIGEN_COMP_MSVC
#if EIGEN_ARCH_i386_OR_x86_64
#define EIGEN_CPUID(abcd, func, id) __cpuidex((int*)abcd, func, id)
#endif
#endif
#endif
namespace internal {

#ifdef EIGEN_CPUID

inline bool cpuid_is_vendor(int abcd[4], const int vendor[3]) {
  return abcd[1] == vendor[0] && abcd[3] == vendor[1] && abcd[2] == vendor[2];
}
inline void queryCacheSizes_intel_direct(int& l1, int& l2, int& l3) {
  int abcd[4];
  l1 = l2 = l3 = 0;
  int cache_id = 0;
  int cache_type = 0;
  do {
    abcd[0] = abcd[1] = abcd[2] = abcd[3] = 0;
    EIGEN_CPUID(abcd, 0x4, cache_id);
    cache_type = (abcd[0] & 0x0F) >> 0;
    if (cache_type == 1 || cache_type == 3)  // data or unified cache
    {
      int cache_level = (abcd[0] & 0xE0) >> 5;        // A[7:5]
      int ways = (abcd[1] & 0xFFC00000) >> 22;        // B[31:22]
      int partitions = (abcd[1] & 0x003FF000) >> 12;  // B[21:12]
      int line_size = (abcd[1] & 0x00000FFF) >> 0;    // B[11:0]
      int sets = (abcd[2]);                           // C[31:0]

      int cache_size = (ways + 1) * (partitions + 1) * (line_size + 1) * (sets + 1);

      switch (cache_level) {
        case 1: l1 = cache_size; break;
        case 2: l2 = cache_size; break;
        case 3: l3 = cache_size; break;
        default: break;
      }
    }
    cache_id++;
  } while (cache_type > 0 && cache_id < 16);
}
inline void queryCacheSizes_intel_codes(int& l1, int& l2, int& l3) {
  int abcd[4];
  abcd[0] = abcd[1] = abcd[2] = abcd[3] = 0;
  l1 = l2 = l3 = 0;
  EIGEN_CPUID(abcd, 0x00000002, 0);
  unsigned char* bytes = reinterpret_cast<unsigned char*>(abcd) + 2;
  bool check_for_p2_core2 = false;
  for (int i = 0; i < 14; ++i) {
    // ... (large switch mapping each cache descriptor byte to l1/l2/l3 sizes
    // in KB; one of the descriptors sets check_for_p2_core2 = true) ...
  }
  if (check_for_p2_core2 && l2 == l3) l3 = 0;
  l1 *= 1024;
  l2 *= 1024;
  l3 *= 1024;
}
inline void queryCacheSizes_intel(int& l1, int& l2, int& l3, int max_std_funcs) {
  if (max_std_funcs >= 4)
    queryCacheSizes_intel_direct(l1, l2, l3);
  else if (max_std_funcs >= 2)
    queryCacheSizes_intel_codes(l1, l2, l3);
  else
    l1 = l2 = l3 = 0;
}
inline void queryCacheSizes_amd(int& l1, int& l2, int& l3) {
  int abcd[4];
  abcd[0] = abcd[1] = abcd[2] = abcd[3] = 0;

  // Check that the extended functions 0x80000005/0x80000006 are supported.
  EIGEN_CPUID(abcd, 0x80000000, 0);
  if (static_cast<numext::uint32_t>(abcd[0]) >= static_cast<numext::uint32_t>(0x80000006)) {
    EIGEN_CPUID(abcd, 0x80000005, 0);
    l1 = (abcd[2] >> 24) * 1024;  // C[31:24] = L1 size in KB
    abcd[0] = abcd[1] = abcd[2] = abcd[3] = 0;
    EIGEN_CPUID(abcd, 0x80000006, 0);
    l2 = (abcd[2] >> 16) * 1024;                       // C[31:16] = L2 size in KB
    l3 = ((abcd[3] & 0xFFFC000) >> 18) * 512 * 1024;   // D[31:18] = L3 size in 512 KB units
  } else {
    l1 = l2 = l3 = 0;
  }
}
#endif  // EIGEN_CPUID

// Queries the sizes in bytes of the L1, L2, and L3 data caches.
inline void queryCacheSizes(int& l1, int& l2, int& l3) {
#ifdef EIGEN_CPUID
  int abcd[4];
  const int GenuineIntel[] = {0x756e6547, 0x49656e69, 0x6c65746e};
  const int AuthenticAMD[] = {0x68747541, 0x69746e65, 0x444d4163};
  const int AMDisbetter_[] = {0x69444d41, 0x74656273, 0x21726574};  // "AMDisbetter!"

  // identify the CPU vendor
  EIGEN_CPUID(abcd, 0x0, 0);
  int max_std_funcs = abcd[0];
  if (cpuid_is_vendor(abcd, GenuineIntel))
    queryCacheSizes_intel(l1, l2, l3, max_std_funcs);
  else if (cpuid_is_vendor(abcd, AuthenticAMD) || cpuid_is_vendor(abcd, AMDisbetter_))
    queryCacheSizes_amd(l1, l2, l3);
  else
    // by default, fall back to Intel's API
    queryCacheSizes_intel(l1, l2, l3, max_std_funcs);
#else
  l1 = l2 = l3 = -1;
#endif
}
// Returns the size in bytes of the L1 data cache.
inline int queryL1CacheSize() {
  int l1(-1), l2, l3;
  queryCacheSizes(l1, l2, l3);
  return l1;
}

// Returns the size in bytes of the outermost (L2 or L3) data cache.
inline int queryTopLevelCacheSize() {
  int l1, l2(-1), l3(-1);
  queryCacheSizes(l1, l2, l3);
  return (std::max)(l2, l3);
}
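
// Usage sketch (illustrative only): Eigen uses these internally to size cache
// blocking for matrix products, but they can be called directly.
//
//   int l1, l2, l3;
//   Eigen::internal::queryCacheSizes(l1, l2, l3);  // all -1 if EIGEN_CPUID is unavailable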
// C++20's std::construct_at, backported for earlier dialects.
#if EIGEN_COMP_CXXVER >= 20
using std::construct_at;
#else
template <class T, class... Args>
EIGEN_DEVICE_FUNC T* construct_at(T* p, Args&&... args) {
  return ::new (const_cast<void*>(static_cast<const volatile void*>(p))) T(std::forward<Args>(args)...);
}
#endif
// C++17's std::destroy_at, backported for earlier dialects.
#if EIGEN_COMP_CXXVER >= 17
using std::destroy_at;
#else
template <class T>
EIGEN_DEVICE_FUNC void destroy_at(T* p) {
  p->~T();
}
#endif
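
// Usage sketch (illustrative only): the pair replaces raw placement-new and
// explicit destructor calls.
//
//   alignas(double) unsigned char storage[sizeof(double)];
//   double* p = Eigen::internal::construct_at(reinterpret_cast<double*>(storage), 3.14);
//   Eigen::internal::destroy_at(p);

}  // end namespace internal
}  // end namespace Eigen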