10#ifndef EIGEN_PACKET_MATH_SVE_H
11#define EIGEN_PACKET_MATH_SVE_H
14#include "../../InternalHeaderCheck.h"
18#ifndef EIGEN_CACHEFRIENDLY_PRODUCT_THRESHOLD
19#define EIGEN_CACHEFRIENDLY_PRODUCT_THRESHOLD 8
22#ifndef EIGEN_HAS_SINGLE_INSTRUCTION_MADD
23#define EIGEN_HAS_SINGLE_INSTRUCTION_MADD
26#define EIGEN_ARCH_DEFAULT_NUMBER_OF_REGISTERS 32
// Number of lanes of type Scalar that fit in an SVE vector of
// SVEVectorLength bits (e.g. 512-bit VL / 32-bit scalar -> 16 lanes).
template <typename Scalar, int SVEVectorLength>
struct sve_packet_size_selector {
  enum { size = SVEVectorLength / (sizeof(Scalar) * CHAR_BIT) };
};
34typedef svint32_t PacketXi __attribute__((arm_sve_vector_bits(EIGEN_ARM64_SVE_VL)));
37struct packet_traits<numext::int32_t> : default_packet_traits {
38 typedef PacketXi type;
39 typedef PacketXi half;
43 size = sve_packet_size_selector<numext::int32_t, EIGEN_ARM64_SVE_VL>::size,
63struct unpacket_traits<PacketXi> {
64 typedef numext::int32_t type;
65 typedef PacketXi half;
67 size = sve_packet_size_selector<numext::int32_t, EIGEN_ARM64_SVE_VL>::size,
70 masked_load_available =
false,
71 masked_store_available =
false
76EIGEN_STRONG_INLINE
void prefetch<numext::int32_t>(
const numext::int32_t* addr) {
77 svprfw(svptrue_b32(), addr, SV_PLDL1KEEP);
81EIGEN_STRONG_INLINE PacketXi pset1<PacketXi>(
const numext::int32_t& from) {
82 return svdup_n_s32(from);
86EIGEN_STRONG_INLINE PacketXi plset<PacketXi>(
const numext::int32_t& a) {
87 numext::int32_t c[packet_traits<numext::int32_t>::size];
88 for (
int i = 0; i < packet_traits<numext::int32_t>::size; i++) c[i] = i;
89 return svadd_s32_z(svptrue_b32(), pset1<PacketXi>(a), svld1_s32(svptrue_b32(), c));
93EIGEN_STRONG_INLINE PacketXi padd<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
94 return svadd_s32_z(svptrue_b32(), a, b);
98EIGEN_STRONG_INLINE PacketXi psub<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
99 return svsub_s32_z(svptrue_b32(), a, b);
103EIGEN_STRONG_INLINE PacketXi pnegate(
const PacketXi& a) {
104 return svneg_s32_z(svptrue_b32(), a);
108EIGEN_STRONG_INLINE PacketXi pconj(
const PacketXi& a) {
113EIGEN_STRONG_INLINE PacketXi pmul<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
114 return svmul_s32_z(svptrue_b32(), a, b);
118EIGEN_STRONG_INLINE PacketXi pdiv<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
119 return svdiv_s32_z(svptrue_b32(), a, b);
123EIGEN_STRONG_INLINE PacketXi pmadd(
const PacketXi& a,
const PacketXi& b,
const PacketXi& c) {
124 return svmla_s32_z(svptrue_b32(), c, a, b);
128EIGEN_STRONG_INLINE PacketXi pmin<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
129 return svmin_s32_z(svptrue_b32(), a, b);
133EIGEN_STRONG_INLINE PacketXi pmax<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
134 return svmax_s32_z(svptrue_b32(), a, b);
138EIGEN_STRONG_INLINE PacketXi pcmp_le<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
139 return svdup_n_s32_z(svcmple_s32(svptrue_b32(), a, b), 0xffffffffu);
143EIGEN_STRONG_INLINE PacketXi pcmp_lt<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
144 return svdup_n_s32_z(svcmplt_s32(svptrue_b32(), a, b), 0xffffffffu);
148EIGEN_STRONG_INLINE PacketXi pcmp_eq<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
149 return svdup_n_s32_z(svcmpeq_s32(svptrue_b32(), a, b), 0xffffffffu);
153EIGEN_STRONG_INLINE PacketXi ptrue<PacketXi>(
const PacketXi& ) {
154 return svdup_n_s32_z(svptrue_b32(), 0xffffffffu);
158EIGEN_STRONG_INLINE PacketXi pzero<PacketXi>(
const PacketXi& ) {
159 return svdup_n_s32_z(svptrue_b32(), 0);
163EIGEN_STRONG_INLINE PacketXi pand<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
164 return svand_s32_z(svptrue_b32(), a, b);
168EIGEN_STRONG_INLINE PacketXi por<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
169 return svorr_s32_z(svptrue_b32(), a, b);
173EIGEN_STRONG_INLINE PacketXi pxor<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
174 return sveor_s32_z(svptrue_b32(), a, b);
178EIGEN_STRONG_INLINE PacketXi pandnot<PacketXi>(
const PacketXi& a,
const PacketXi& b) {
179 return svbic_s32_z(svptrue_b32(), a, b);
183EIGEN_STRONG_INLINE PacketXi parithmetic_shift_right(PacketXi a) {
184 return svasrd_n_s32_z(svptrue_b32(), a, N);
188EIGEN_STRONG_INLINE PacketXi plogical_shift_right(PacketXi a) {
189 return svreinterpret_s32_u32(svlsr_n_u32_z(svptrue_b32(), svreinterpret_u32_s32(a), N));
193EIGEN_STRONG_INLINE PacketXi plogical_shift_left(PacketXi a) {
194 return svlsl_n_s32_z(svptrue_b32(), a, N);
198EIGEN_STRONG_INLINE PacketXi pload<PacketXi>(
const numext::int32_t* from) {
199 EIGEN_DEBUG_ALIGNED_LOAD
return svld1_s32(svptrue_b32(), from);
203EIGEN_STRONG_INLINE PacketXi ploadu<PacketXi>(
const numext::int32_t* from) {
204 EIGEN_DEBUG_UNALIGNED_LOAD
return svld1_s32(svptrue_b32(), from);
208EIGEN_STRONG_INLINE PacketXi ploaddup<PacketXi>(
const numext::int32_t* from) {
209 svuint32_t indices = svindex_u32(0, 1);
210 indices = svzip1_u32(indices, indices);
211 return svld1_gather_u32index_s32(svptrue_b32(), from, indices);
215EIGEN_STRONG_INLINE PacketXi ploadquad<PacketXi>(
const numext::int32_t* from) {
216 svuint32_t indices = svindex_u32(0, 1);
217 indices = svzip1_u32(indices, indices);
218 indices = svzip1_u32(indices, indices);
219 return svld1_gather_u32index_s32(svptrue_b32(), from, indices);
223EIGEN_STRONG_INLINE
void pstore<numext::int32_t>(numext::int32_t* to,
const PacketXi& from) {
224 EIGEN_DEBUG_ALIGNED_STORE svst1_s32(svptrue_b32(), to, from);
228EIGEN_STRONG_INLINE
void pstoreu<numext::int32_t>(numext::int32_t* to,
const PacketXi& from) {
229 EIGEN_DEBUG_UNALIGNED_STORE svst1_s32(svptrue_b32(), to, from);
233EIGEN_DEVICE_FUNC
inline PacketXi pgather<numext::int32_t, PacketXi>(
const numext::int32_t* from, Index stride) {
235 svint32_t indices = svindex_s32(0, stride);
236 return svld1_gather_s32index_s32(svptrue_b32(), from, indices);
240EIGEN_DEVICE_FUNC
inline void pscatter<numext::int32_t, PacketXi>(numext::int32_t* to,
const PacketXi& from,
243 svint32_t indices = svindex_s32(0, stride);
244 svst1_scatter_s32index_s32(svptrue_b32(), to, indices, from);
248EIGEN_STRONG_INLINE numext::int32_t pfirst<PacketXi>(
const PacketXi& a) {
250 return svlasta_s32(svpfalse_b(), a);
254EIGEN_STRONG_INLINE PacketXi preverse(
const PacketXi& a) {
259EIGEN_STRONG_INLINE PacketXi pabs(
const PacketXi& a) {
260 return svabs_s32_z(svptrue_b32(), a);
264EIGEN_STRONG_INLINE numext::int32_t predux<PacketXi>(
const PacketXi& a) {
265 return static_cast<numext::int32_t
>(svaddv_s32(svptrue_b32(), a));
269EIGEN_STRONG_INLINE numext::int32_t predux_mul<PacketXi>(
const PacketXi& a) {
270 EIGEN_STATIC_ASSERT((EIGEN_ARM64_SVE_VL % 128 == 0), EIGEN_INTERNAL_ERROR_PLEASE_FILE_A_BUG_REPORT);
273 svint32_t prod = svmul_s32_z(svptrue_b32(), a, svrev_s32(a));
277 if (EIGEN_ARM64_SVE_VL >= 2048) {
278 half_prod = svtbl_s32(prod, svindex_u32(32, 1));
279 prod = svmul_s32_z(svptrue_b32(), prod, half_prod);
281 if (EIGEN_ARM64_SVE_VL >= 1024) {
282 half_prod = svtbl_s32(prod, svindex_u32(16, 1));
283 prod = svmul_s32_z(svptrue_b32(), prod, half_prod);
285 if (EIGEN_ARM64_SVE_VL >= 512) {
286 half_prod = svtbl_s32(prod, svindex_u32(8, 1));
287 prod = svmul_s32_z(svptrue_b32(), prod, half_prod);
289 if (EIGEN_ARM64_SVE_VL >= 256) {
290 half_prod = svtbl_s32(prod, svindex_u32(4, 1));
291 prod = svmul_s32_z(svptrue_b32(), prod, half_prod);
294 half_prod = svtbl_s32(prod, svindex_u32(2, 1));
295 prod = svmul_s32_z(svptrue_b32(), prod, half_prod);
298 return pfirst<PacketXi>(prod);
302EIGEN_STRONG_INLINE numext::int32_t predux_min<PacketXi>(
const PacketXi& a) {
303 return svminv_s32(svptrue_b32(), a);
307EIGEN_STRONG_INLINE numext::int32_t predux_max<PacketXi>(
const PacketXi& a) {
308 return svmaxv_s32(svptrue_b32(), a);
312EIGEN_DEVICE_FUNC
inline void ptranspose(PacketBlock<PacketXi, N>& kernel) {
313 int buffer[packet_traits<numext::int32_t>::size * N] = {0};
316 PacketXi stride_index = svindex_s32(0, N);
318 for (i = 0; i < N; i++) {
319 svst1_scatter_s32index_s32(svptrue_b32(), buffer + i, stride_index, kernel.packet[i]);
321 for (i = 0; i < N; i++) {
322 kernel.packet[i] = svld1_s32(svptrue_b32(), buffer + i * packet_traits<numext::int32_t>::size);
328typedef svfloat32_t PacketXf __attribute__((arm_sve_vector_bits(EIGEN_ARM64_SVE_VL)));
331struct packet_traits<float> : default_packet_traits {
332 typedef PacketXf type;
333 typedef PacketXf half;
338 size = sve_packet_size_selector<float, EIGEN_ARM64_SVE_VL>::size,
357 HasSin = EIGEN_FAST_MATH,
358 HasCos = EIGEN_FAST_MATH,
362 HasTanh = EIGEN_FAST_MATH,
363 HasErf = EIGEN_FAST_MATH
368struct unpacket_traits<PacketXf> {
370 typedef PacketXf half;
371 typedef PacketXi integer_packet;
374 size = sve_packet_size_selector<float, EIGEN_ARM64_SVE_VL>::size,
377 masked_load_available =
false,
378 masked_store_available =
false
383EIGEN_STRONG_INLINE PacketXf pset1<PacketXf>(
const float& from) {
384 return svdup_n_f32(from);
388EIGEN_STRONG_INLINE PacketXf pset1frombits<PacketXf>(numext::uint32_t from) {
389 return svreinterpret_f32_u32(svdup_n_u32_z(svptrue_b32(), from));
393EIGEN_STRONG_INLINE PacketXf plset<PacketXf>(
const float& a) {
394 float c[packet_traits<float>::size];
395 for (
int i = 0; i < packet_traits<float>::size; i++) c[i] = i;
396 return svadd_f32_z(svptrue_b32(), pset1<PacketXf>(a), svld1_f32(svptrue_b32(), c));
400EIGEN_STRONG_INLINE PacketXf padd<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
401 return svadd_f32_z(svptrue_b32(), a, b);
405EIGEN_STRONG_INLINE PacketXf psub<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
406 return svsub_f32_z(svptrue_b32(), a, b);
410EIGEN_STRONG_INLINE PacketXf pnegate(
const PacketXf& a) {
411 return svneg_f32_z(svptrue_b32(), a);
415EIGEN_STRONG_INLINE PacketXf pconj(
const PacketXf& a) {
420EIGEN_STRONG_INLINE PacketXf pmul<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
421 return svmul_f32_z(svptrue_b32(), a, b);
425EIGEN_STRONG_INLINE PacketXf pdiv<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
426 return svdiv_f32_z(svptrue_b32(), a, b);
430EIGEN_STRONG_INLINE PacketXf pmadd(
const PacketXf& a,
const PacketXf& b,
const PacketXf& c) {
431 return svmla_f32_z(svptrue_b32(), c, a, b);
435EIGEN_STRONG_INLINE PacketXf pmin<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
436 return svmin_f32_z(svptrue_b32(), a, b);
440EIGEN_STRONG_INLINE PacketXf pmin<PropagateNaN, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
441 return pmin<PacketXf>(a, b);
445EIGEN_STRONG_INLINE PacketXf pmin<PropagateNumbers, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
446 return svminnm_f32_z(svptrue_b32(), a, b);
450EIGEN_STRONG_INLINE PacketXf pmax<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
451 return svmax_f32_z(svptrue_b32(), a, b);
455EIGEN_STRONG_INLINE PacketXf pmax<PropagateNaN, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
456 return pmax<PacketXf>(a, b);
460EIGEN_STRONG_INLINE PacketXf pmax<PropagateNumbers, PacketXf>(
const PacketXf& a,
const PacketXf& b) {
461 return svmaxnm_f32_z(svptrue_b32(), a, b);
467EIGEN_STRONG_INLINE PacketXf pcmp_le<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
468 return svreinterpret_f32_u32(svdup_n_u32_z(svcmple_f32(svptrue_b32(), a, b), 0xffffffffu));
472EIGEN_STRONG_INLINE PacketXf pcmp_lt<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
473 return svreinterpret_f32_u32(svdup_n_u32_z(svcmplt_f32(svptrue_b32(), a, b), 0xffffffffu));
477EIGEN_STRONG_INLINE PacketXf pcmp_eq<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
478 return svreinterpret_f32_u32(svdup_n_u32_z(svcmpeq_f32(svptrue_b32(), a, b), 0xffffffffu));
485EIGEN_STRONG_INLINE PacketXf pcmp_lt_or_nan<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
486 return svreinterpret_f32_u32(svdup_n_u32_z(svnot_b_z(svptrue_b32(), svcmpge_f32(svptrue_b32(), a, b)), 0xffffffffu));
490EIGEN_STRONG_INLINE PacketXf pfloor<PacketXf>(
const PacketXf& a) {
491 return svrintm_f32_z(svptrue_b32(), a);
495EIGEN_STRONG_INLINE PacketXf ptrue<PacketXf>(
const PacketXf& ) {
496 return svreinterpret_f32_u32(svdup_n_u32_z(svptrue_b32(), 0xffffffffu));
501EIGEN_STRONG_INLINE PacketXf pand<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
502 return svreinterpret_f32_u32(svand_u32_z(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
506EIGEN_STRONG_INLINE PacketXf por<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
507 return svreinterpret_f32_u32(svorr_u32_z(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
511EIGEN_STRONG_INLINE PacketXf pxor<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
512 return svreinterpret_f32_u32(sveor_u32_z(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
516EIGEN_STRONG_INLINE PacketXf pandnot<PacketXf>(
const PacketXf& a,
const PacketXf& b) {
517 return svreinterpret_f32_u32(svbic_u32_z(svptrue_b32(), svreinterpret_u32_f32(a), svreinterpret_u32_f32(b)));
521EIGEN_STRONG_INLINE PacketXf pload<PacketXf>(
const float* from) {
522 EIGEN_DEBUG_ALIGNED_LOAD
return svld1_f32(svptrue_b32(), from);
526EIGEN_STRONG_INLINE PacketXf ploadu<PacketXf>(
const float* from) {
527 EIGEN_DEBUG_UNALIGNED_LOAD
return svld1_f32(svptrue_b32(), from);
531EIGEN_STRONG_INLINE PacketXf ploaddup<PacketXf>(
const float* from) {
532 svuint32_t indices = svindex_u32(0, 1);
533 indices = svzip1_u32(indices, indices);
534 return svld1_gather_u32index_f32(svptrue_b32(), from, indices);
538EIGEN_STRONG_INLINE PacketXf ploadquad<PacketXf>(
const float* from) {
539 svuint32_t indices = svindex_u32(0, 1);
540 indices = svzip1_u32(indices, indices);
541 indices = svzip1_u32(indices, indices);
542 return svld1_gather_u32index_f32(svptrue_b32(), from, indices);
546EIGEN_STRONG_INLINE
void pstore<float>(
float* to,
const PacketXf& from) {
547 EIGEN_DEBUG_ALIGNED_STORE svst1_f32(svptrue_b32(), to, from);
551EIGEN_STRONG_INLINE
void pstoreu<float>(
float* to,
const PacketXf& from) {
552 EIGEN_DEBUG_UNALIGNED_STORE svst1_f32(svptrue_b32(), to, from);
556EIGEN_DEVICE_FUNC
inline PacketXf pgather<float, PacketXf>(
const float* from, Index stride) {
558 svint32_t indices = svindex_s32(0, stride);
559 return svld1_gather_s32index_f32(svptrue_b32(), from, indices);
563EIGEN_DEVICE_FUNC
inline void pscatter<float, PacketXf>(
float* to,
const PacketXf& from, Index stride) {
565 svint32_t indices = svindex_s32(0, stride);
566 svst1_scatter_s32index_f32(svptrue_b32(), to, indices, from);
570EIGEN_STRONG_INLINE
float pfirst<PacketXf>(
const PacketXf& a) {
572 return svlasta_f32(svpfalse_b(), a);
576EIGEN_STRONG_INLINE PacketXf preverse(
const PacketXf& a) {
581EIGEN_STRONG_INLINE PacketXf pabs(
const PacketXf& a) {
582 return svabs_f32_z(svptrue_b32(), a);
588EIGEN_STRONG_INLINE PacketXf pfrexp<PacketXf>(
const PacketXf& a, PacketXf& exponent) {
589 return pfrexp_generic(a, exponent);
593EIGEN_STRONG_INLINE
float predux<PacketXf>(
const PacketXf& a) {
594 return svaddv_f32(svptrue_b32(), a);
601EIGEN_STRONG_INLINE
float predux_mul<PacketXf>(
const PacketXf& a) {
602 EIGEN_STATIC_ASSERT((EIGEN_ARM64_SVE_VL % 128 == 0), EIGEN_INTERNAL_ERROR_PLEASE_FILE_A_BUG_REPORT);
604 svfloat32_t prod = svmul_f32_z(svptrue_b32(), a, svrev_f32(a));
605 svfloat32_t half_prod;
608 if (EIGEN_ARM64_SVE_VL >= 2048) {
609 half_prod = svtbl_f32(prod, svindex_u32(32, 1));
610 prod = svmul_f32_z(svptrue_b32(), prod, half_prod);
612 if (EIGEN_ARM64_SVE_VL >= 1024) {
613 half_prod = svtbl_f32(prod, svindex_u32(16, 1));
614 prod = svmul_f32_z(svptrue_b32(), prod, half_prod);
616 if (EIGEN_ARM64_SVE_VL >= 512) {
617 half_prod = svtbl_f32(prod, svindex_u32(8, 1));
618 prod = svmul_f32_z(svptrue_b32(), prod, half_prod);
620 if (EIGEN_ARM64_SVE_VL >= 256) {
621 half_prod = svtbl_f32(prod, svindex_u32(4, 1));
622 prod = svmul_f32_z(svptrue_b32(), prod, half_prod);
625 half_prod = svtbl_f32(prod, svindex_u32(2, 1));
626 prod = svmul_f32_z(svptrue_b32(), prod, half_prod);
629 return pfirst<PacketXf>(prod);
633EIGEN_STRONG_INLINE
float predux_min<PacketXf>(
const PacketXf& a) {
634 return svminv_f32(svptrue_b32(), a);
638EIGEN_STRONG_INLINE
float predux_max<PacketXf>(
const PacketXf& a) {
639 return svmaxv_f32(svptrue_b32(), a);
643EIGEN_DEVICE_FUNC
inline void ptranspose(PacketBlock<PacketXf, N>& kernel) {
644 float buffer[packet_traits<float>::size * N] = {0};
647 PacketXi stride_index = svindex_s32(0, N);
649 for (i = 0; i < N; i++) {
650 svst1_scatter_s32index_f32(svptrue_b32(), buffer + i, stride_index, kernel.packet[i]);
653 for (i = 0; i < N; i++) {
654 kernel.packet[i] = svld1_f32(svptrue_b32(), buffer + i * packet_traits<float>::size);
659EIGEN_STRONG_INLINE PacketXf pldexp<PacketXf>(
const PacketXf& a,
const PacketXf& exponent) {
660 return pldexp_generic(a, exponent);
@ Aligned64
Definition Constants.h:239
Namespace containing all symbols from the Eigen library.
Definition Core:137