39 #ifndef _GLIBCXX_EXPERIMENTAL_SIMD_FIXED_SIZE_H_
40 #define _GLIBCXX_EXPERIMENTAL_SIMD_FIXED_SIZE_H_
42 #if __cplusplus >= 201703L
46 _GLIBCXX_SIMD_BEGIN_NAMESPACE
// __simd_tuple_element {{{
// Primary template: the _I-th element type of a _SimdTuple, defined by the
// partial specializations below.
template <size_t _I, typename _Tp>
  struct __simd_tuple_element;
52 template <
typename _Tp,
typename _A0,
typename... _As>
53 struct __simd_tuple_element<0, _SimdTuple<_Tp, _A0, _As...>>
54 {
using type = simd<_Tp, _A0>; };
56 template <
size_t _I,
typename _Tp,
typename _A0,
typename... _As>
57 struct __simd_tuple_element<_I, _SimdTuple<_Tp, _A0, _As...>>
60 typename __simd_tuple_element<_I - 1, _SimdTuple<_Tp, _As...>>::type;
63 template <
size_t _I,
typename _Tp>
64 using __simd_tuple_element_t =
typename __simd_tuple_element<_I, _Tp>::type;
69 template <
typename _Tp,
typename... _A0s,
typename... _A1s>
70 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple<_Tp, _A0s..., _A1s...>
71 __simd_tuple_concat(
const _SimdTuple<_Tp, _A0s...>& __left,
72 const _SimdTuple<_Tp, _A1s...>& __right)
74 if constexpr (
sizeof...(_A0s) == 0)
76 else if constexpr (sizeof...(_A1s) == 0)
79 return {__left.first, __simd_tuple_concat(__left.second, __right)};
82 template <
typename _Tp,
typename _A10,
typename... _A1s>
83 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple<_Tp, simd_abi::scalar, _A10,
85 __simd_tuple_concat(
const _Tp& __left,
86 const _SimdTuple<_Tp, _A10, _A1s...>& __right)
87 {
return {__left, __right}; }
93 template <
size_t _Np,
typename _Tp>
94 _GLIBCXX_SIMD_INTRINSIC constexpr decltype(
auto)
95 __simd_tuple_pop_front(_Tp&& __x)
97 if constexpr (_Np == 0)
98 return static_cast<_Tp&&>(__x);
101 using _Up = __remove_cvref_t<_Tp>;
102 static_assert(_Np >= _Up::_S_first_size);
103 return __simd_tuple_pop_front<_Np - _Up::_S_first_size>(__x.second);
// Tag types selecting the return style of __simd_tuple_get_impl: __as_simd
// yields a simd object, __as_simd_tuple a reference to the stored member.
// NOTE(review): `struct __as_simd {};` is referenced by __get_simd_at below
// but its declaration was lost in extraction; restored here.
struct __as_simd {};

struct __as_simd_tuple {};
113 template <
typename _Tp,
typename _A0,
typename... _Abis>
114 _GLIBCXX_SIMD_INTRINSIC constexpr simd<_Tp, _A0>
115 __simd_tuple_get_impl(__as_simd,
const _SimdTuple<_Tp, _A0, _Abis...>& __t,
117 {
return {__private_init, __t.first}; }
119 template <
typename _Tp,
typename _A0,
typename... _Abis>
120 _GLIBCXX_SIMD_INTRINSIC constexpr
const auto&
121 __simd_tuple_get_impl(__as_simd_tuple,
122 const _SimdTuple<_Tp, _A0, _Abis...>& __t,
124 {
return __t.first; }
126 template <
typename _Tp,
typename _A0,
typename... _Abis>
127 _GLIBCXX_SIMD_INTRINSIC constexpr
auto&
128 __simd_tuple_get_impl(__as_simd_tuple, _SimdTuple<_Tp, _A0, _Abis...>& __t,
130 {
return __t.first; }
132 template <
typename _R,
size_t _Np,
typename _Tp,
typename... _Abis>
133 _GLIBCXX_SIMD_INTRINSIC constexpr
auto
134 __simd_tuple_get_impl(_R,
const _SimdTuple<_Tp, _Abis...>& __t,
136 {
return __simd_tuple_get_impl(_R(), __t.second, _SizeConstant<_Np - 1>()); }
138 template <
size_t _Np,
typename _Tp,
typename... _Abis>
139 _GLIBCXX_SIMD_INTRINSIC constexpr
auto&
140 __simd_tuple_get_impl(__as_simd_tuple, _SimdTuple<_Tp, _Abis...>& __t,
143 return __simd_tuple_get_impl(__as_simd_tuple(), __t.second,
144 _SizeConstant<_Np - 1>());
147 template <
size_t _Np,
typename _Tp,
typename... _Abis>
148 _GLIBCXX_SIMD_INTRINSIC constexpr
auto
149 __get_simd_at(
const _SimdTuple<_Tp, _Abis...>& __t)
150 {
return __simd_tuple_get_impl(__as_simd(), __t, _SizeConstant<_Np>()); }
154 template <
size_t _Np,
typename _Tp,
typename... _Abis>
155 _GLIBCXX_SIMD_INTRINSIC constexpr
auto
156 __get_tuple_at(
const _SimdTuple<_Tp, _Abis...>& __t)
158 return __simd_tuple_get_impl(__as_simd_tuple(), __t, _SizeConstant<_Np>());
161 template <
size_t _Np,
typename _Tp,
typename... _Abis>
162 _GLIBCXX_SIMD_INTRINSIC constexpr
auto&
163 __get_tuple_at(_SimdTuple<_Tp, _Abis...>& __t)
165 return __simd_tuple_get_impl(__as_simd_tuple(), __t, _SizeConstant<_Np>());
169 template <
typename _Tp,
typename _Abi,
size_t _Offset>
170 struct __tuple_element_meta :
public _Abi::_SimdImpl
172 static_assert(is_same_v<
typename _Abi::_SimdImpl::abi_type,
175 using value_type = _Tp;
176 using abi_type = _Abi;
177 using _Traits = _SimdTraits<_Tp, _Abi>;
178 using _MaskImpl =
typename _Abi::_MaskImpl;
179 using _MaskMember =
typename _Traits::_MaskMember;
180 using simd_type = simd<_Tp, _Abi>;
181 static constexpr
size_t _S_offset = _Offset;
182 static constexpr
size_t _S_size() {
return simd_size<_Tp, _Abi>::value; }
183 static constexpr _MaskImpl _S_mask_impl = {};
185 template <
size_t _Np,
bool _Sanitized>
186 _GLIBCXX_SIMD_INTRINSIC
static auto
187 _S_submask(_BitMask<_Np, _Sanitized> __bits)
188 {
return __bits.template _M_extract<_Offset, _S_size()>(); }
190 template <
size_t _Np,
bool _Sanitized>
191 _GLIBCXX_SIMD_INTRINSIC
static _MaskMember
192 _S_make_mask(_BitMask<_Np, _Sanitized> __bits)
194 return _MaskImpl::template _S_convert<_Tp>(
195 __bits.template _M_extract<_Offset, _S_size()>()._M_sanitized());
198 _GLIBCXX_SIMD_INTRINSIC
static _ULLong
199 _S_mask_to_shifted_ullong(_MaskMember __k)
200 {
return _MaskImpl::_S_to_bits(__k).to_ullong() << _Offset; }
203 template <
size_t _Offset,
typename _Tp,
typename _Abi,
typename... _As>
204 __tuple_element_meta<_Tp, _Abi, _Offset>
205 __make_meta(
const _SimdTuple<_Tp, _Abi, _As...>&)
210 template <
size_t _Offset,
typename _Base>
211 struct _WithOffset :
public _Base
213 static inline constexpr
size_t _S_offset = _Offset;
215 _GLIBCXX_SIMD_INTRINSIC
char* _M_as_charptr()
217 return reinterpret_cast<char*
>(
this)
218 + _S_offset *
sizeof(
typename _Base::value_type);
221 _GLIBCXX_SIMD_INTRINSIC
const char* _M_as_charptr()
const
223 return reinterpret_cast<const char*
>(
this)
224 + _S_offset *
sizeof(
typename _Base::value_type);
229 template <
size_t _O0,
size_t _O1,
typename _Base>
230 struct _WithOffset<_O0, _WithOffset<_O1, _Base>> {};
232 template <
size_t _Offset,
typename _Tp>
235 {
return static_cast<_WithOffset<_Offset, __remove_cvref_t<_Tp>
>&>(
__base); }
237 template <
size_t _Offset,
typename _Tp>
239 __add_offset(const _Tp&
__base)
241 return static_cast<const _WithOffset<_Offset, __remove_cvref_t<_Tp>
>&>(
245 template <
size_t _Offset,
size_t _ExistingOffset,
typename _Tp>
247 __add_offset(_WithOffset<_ExistingOffset, _Tp>&
__base)
249 return static_cast<_WithOffset<_Offset + _ExistingOffset, _Tp>&
>(
250 static_cast<_Tp&
>(
__base));
253 template <
size_t _Offset,
size_t _ExistingOffset,
typename _Tp>
255 __add_offset(const _WithOffset<_ExistingOffset, _Tp>&
__base)
257 return static_cast<const _WithOffset<_Offset + _ExistingOffset, _Tp>&
>(
258 static_cast<const _Tp&
>(
__base));
261 template <
typename _Tp>
262 constexpr
inline size_t __offset = 0;
264 template <
size_t _Offset,
typename _Tp>
265 constexpr
inline size_t __offset<_WithOffset<_Offset, _Tp>>
266 = _WithOffset<_Offset, _Tp>::_S_offset;
268 template <
typename _Tp>
269 constexpr
inline size_t __offset<const _Tp> = __offset<_Tp>;
271 template <
typename _Tp>
272 constexpr
inline size_t __offset<_Tp&> = __offset<_Tp>;
274 template <
typename _Tp>
275 constexpr
inline size_t __offset<_Tp&&> = __offset<_Tp>;
280 template <
typename _Tp>
281 struct _SimdTuple<_Tp>
283 using value_type = _Tp;
284 static constexpr
size_t _S_tuple_size = 0;
285 static constexpr
size_t _S_size() {
return 0; }
289 template <
typename _FirstType,
typename _SecondType>
290 struct _SimdTupleData
295 _GLIBCXX_SIMD_INTRINSIC
296 constexpr
bool _M_is_constprop()
const
298 if constexpr (is_class_v<_FirstType>)
299 return first._M_is_constprop() && second._M_is_constprop();
301 return __builtin_constant_p(first) && second._M_is_constprop();
305 template <
typename _FirstType,
typename _Tp>
306 struct _SimdTupleData<_FirstType, _SimdTuple<_Tp>>
309 static constexpr _SimdTuple<_Tp> second = {};
311 _GLIBCXX_SIMD_INTRINSIC
312 constexpr
bool _M_is_constprop()
const
314 if constexpr (is_class_v<_FirstType>)
315 return first._M_is_constprop();
317 return __builtin_constant_p(first);
322 template <
typename _Tp,
typename _Abi0,
typename... _Abis>
323 struct _SimdTuple<_Tp, _Abi0, _Abis...>
324 : _SimdTupleData<typename _SimdTraits<_Tp, _Abi0>::_SimdMember,
325 _SimdTuple<_Tp, _Abis...>>
327 static_assert(!__is_fixed_size_abi_v<_Abi0>);
328 using value_type = _Tp;
329 using _FirstType =
typename _SimdTraits<_Tp, _Abi0>::_SimdMember;
330 using _FirstAbi = _Abi0;
331 using _SecondType = _SimdTuple<_Tp, _Abis...>;
332 static constexpr
size_t _S_tuple_size =
sizeof...(_Abis) + 1;
334 static constexpr
size_t _S_size()
335 {
return simd_size_v<_Tp, _Abi0> + _SecondType::_S_size(); }
337 static constexpr
size_t _S_first_size = simd_size_v<_Tp, _Abi0>;
338 static constexpr
bool _S_is_homogeneous = (is_same_v<_Abi0, _Abis> && ...);
340 using _Base = _SimdTupleData<typename _SimdTraits<_Tp, _Abi0>::_SimdMember,
341 _SimdTuple<_Tp, _Abis...>>;
345 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple() =
default;
346 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple(
const _SimdTuple&) =
default;
347 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple& operator=(
const _SimdTuple&)
350 template <
typename _Up>
351 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple(_Up&& __x)
352 : _Base{static_cast<_Up&&>(__x)} {}
354 template <
typename _Up,
typename _Up2>
355 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple(_Up&& __x, _Up2&& __y)
356 : _Base{static_cast<_Up&&>(__x), static_cast<_Up2&&>(__y)} {}
358 template <
typename _Up>
359 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple(_Up&& __x, _SimdTuple<_Tp>)
360 : _Base{static_cast<_Up&&>(__x)} {}
362 _GLIBCXX_SIMD_INTRINSIC
char* _M_as_charptr()
363 {
return reinterpret_cast<char*
>(
this); }
365 _GLIBCXX_SIMD_INTRINSIC
const char* _M_as_charptr()
const
366 {
return reinterpret_cast<const char*
>(
this); }
368 template <
size_t _Np>
369 _GLIBCXX_SIMD_INTRINSIC constexpr
auto& _M_at()
371 if constexpr (_Np == 0)
374 return second.template _M_at<_Np - 1>();
377 template <
size_t _Np>
378 _GLIBCXX_SIMD_INTRINSIC constexpr const auto& _M_at()
const
380 if constexpr (_Np == 0)
383 return second.template _M_at<_Np - 1>();
386 template <
size_t _Np>
387 _GLIBCXX_SIMD_INTRINSIC constexpr auto _M_simd_at()
const
389 if constexpr (_Np == 0)
390 return simd<_Tp, _Abi0>(__private_init, first);
392 return second.template _M_simd_at<_Np - 1>();
395 template <
size_t _Offset = 0, typename _Fp>
396 _GLIBCXX_SIMD_INTRINSIC static constexpr _SimdTuple
397 _S_generate(_Fp&& __gen, _SizeConstant<_Offset> = {})
399 auto&& __first = __gen(__tuple_element_meta<_Tp, _Abi0, _Offset>());
400 if constexpr (_S_tuple_size == 1)
404 _SecondType::_S_generate(
405 static_cast<_Fp&&
>(__gen),
406 _SizeConstant<_Offset + simd_size_v<_Tp, _Abi0>>())};
409 template <
size_t _Offset = 0,
typename _Fp,
typename... _More>
410 _GLIBCXX_SIMD_INTRINSIC _SimdTuple
411 _M_apply_wrapped(_Fp&& __fun,
const _More&... __more)
const
414 = __fun(__make_meta<_Offset>(*
this), first, __more.first...);
415 if constexpr (_S_tuple_size == 1)
420 second.template _M_apply_wrapped<_Offset + simd_size_v<_Tp, _Abi0>>(
421 static_cast<_Fp&&
>(__fun), __more.second...)};
424 template <
typename _Tup>
425 _GLIBCXX_SIMD_INTRINSIC constexpr decltype(
auto)
426 _M_extract_argument(_Tup&& __tup)
const
428 using _TupT =
typename __remove_cvref_t<_Tup>::value_type;
429 if constexpr (is_same_v<_SimdTuple, __remove_cvref_t<_Tup>>)
431 else if (__builtin_is_constant_evaluated())
432 return __fixed_size_storage_t<_TupT, _S_first_size>::_S_generate([&](
433 auto __meta) constexpr {
434 return __meta._S_generator(
435 [&](
auto __i) constexpr {
return __tup[__i]; },
436 static_cast<_TupT*
>(
nullptr));
440 __fixed_size_storage_t<_TupT, _S_first_size> __r;
441 __builtin_memcpy(__r._M_as_charptr(), __tup._M_as_charptr(),
447 template <
typename _Tup>
448 _GLIBCXX_SIMD_INTRINSIC constexpr
auto&
449 _M_skip_argument(_Tup&& __tup)
const
451 static_assert(_S_tuple_size > 1);
452 using _Up = __remove_cvref_t<_Tup>;
453 constexpr
size_t __off = __offset<_Up>;
454 if constexpr (_S_first_size == _Up::_S_first_size && __off == 0)
456 else if constexpr (_S_first_size > _Up::_S_first_size
457 && _S_first_size % _Up::_S_first_size == 0
459 return __simd_tuple_pop_front<_S_first_size>(__tup);
460 else if constexpr (_S_first_size + __off < _Up::_S_first_size)
461 return __add_offset<_S_first_size>(__tup);
462 else if constexpr (_S_first_size + __off == _Up::_S_first_size)
465 __assert_unreachable<_Tup>();
468 template <
size_t _Offset, typename... _More>
469 _GLIBCXX_SIMD_INTRINSIC constexpr
void
470 _M_assign_front(const _SimdTuple<_Tp, _Abi0, _More...>& __x) &
472 static_assert(_Offset == 0);
474 if constexpr (
sizeof...(_More) > 0)
476 static_assert(
sizeof...(_Abis) >=
sizeof...(_More));
477 second.template _M_assign_front<0>(__x.second);
481 template <
size_t _Offset>
482 _GLIBCXX_SIMD_INTRINSIC constexpr
void
483 _M_assign_front(
const _FirstType& __x) &
485 static_assert(_Offset == 0);
489 template <
size_t _Offset,
typename... _As>
490 _GLIBCXX_SIMD_INTRINSIC constexpr
void
491 _M_assign_front(
const _SimdTuple<_Tp, _As...>& __x) &
493 __builtin_memcpy(_M_as_charptr() + _Offset *
sizeof(value_type),
495 sizeof(_Tp) * _SimdTuple<_Tp, _As...>::_S_size());
503 template <
typename _Fp,
typename... _More>
504 _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple
505 _M_apply_per_chunk(_Fp&& __fun, _More&&... __more)
const
509 is_lvalue_reference<_More>,
510 negation<is_const<remove_reference_t<_More>>>>) )
513 auto&& __first = [&](
auto... __args) constexpr
515 auto __r = __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), first,
517 [[maybe_unused]]
auto&& __ignore_me = {(
518 [](
auto&& __dst,
const auto& __src) {
519 if constexpr (is_assignable_v<decltype(__dst),
522 __dst.template _M_assign_front<__offset<decltype(__dst)>>(
525 }(
static_cast<_More&&
>(__more), __args),
529 (_M_extract_argument(__more)...);
530 if constexpr (_S_tuple_size == 1)
534 second._M_apply_per_chunk(
static_cast<_Fp&&
>(__fun),
535 _M_skip_argument(__more)...)};
539 auto&& __first = __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), first,
540 _M_extract_argument(__more)...);
541 if constexpr (_S_tuple_size == 1)
545 second._M_apply_per_chunk(
static_cast<_Fp&&
>(__fun),
546 _M_skip_argument(__more)...)};
550 template <
typename _R = _Tp,
typename _Fp,
typename... _More>
551 _GLIBCXX_SIMD_INTRINSIC
auto _M_apply_r(_Fp&& __fun,
552 const _More&... __more)
const
554 auto&& __first = __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), first,
556 if constexpr (_S_tuple_size == 1)
559 return __simd_tuple_concat<_R>(
560 __first, second.template _M_apply_r<_R>(static_cast<_Fp&&>(__fun),
564 template <typename _Fp, typename... _More>
565 _GLIBCXX_SIMD_INTRINSIC constexpr friend _SanitizedBitMask<_S_size()>
566 _M_test(const _Fp& __fun, const _SimdTuple& __x, const _More&... __more)
568 const _SanitizedBitMask<_S_first_size> __first
569 = _Abi0::_MaskImpl::_S_to_bits(
570 __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), __x.first,
572 if constexpr (_S_tuple_size == 1)
575 return _M_test(__fun, __x.second, __more.second...)
576 ._M_prepend(__first);
579 template <typename _Up, _Up _I>
580 _GLIBCXX_SIMD_INTRINSIC constexpr _Tp
581 operator[](integral_constant<_Up, _I>) const noexcept
583 if constexpr (_I < simd_size_v<_Tp, _Abi0>)
584 return _M_subscript_read(_I);
586 return second[integral_constant<_Up, _I - simd_size_v<_Tp, _Abi0>>()];
589 _Tp operator[](
size_t __i)
const noexcept
591 if constexpr (_S_tuple_size == 1)
592 return _M_subscript_read(__i);
595 #ifdef _GLIBCXX_SIMD_USE_ALIASING_LOADS
596 return reinterpret_cast<const __may_alias<_Tp>*
>(
this)[__i];
598 if constexpr (__is_scalar_abi<_Abi0>())
600 const _Tp* ptr = &first;
604 return __i < simd_size_v<_Tp, _Abi0>
605 ? _M_subscript_read(__i)
606 : second[__i - simd_size_v<_Tp, _Abi0>];
611 void _M_set(
size_t __i, _Tp __val) noexcept
613 if constexpr (_S_tuple_size == 1)
614 return _M_subscript_write(__i, __val);
617 #ifdef _GLIBCXX_SIMD_USE_ALIASING_LOADS
618 reinterpret_cast<__may_alias<_Tp>*
>(
this)[__i] = __val;
620 if (__i < simd_size_v<_Tp, _Abi0>)
621 _M_subscript_write(__i, __val);
623 second._M_set(__i - simd_size_v<_Tp, _Abi0>, __val);
630 _Tp _M_subscript_read([[maybe_unused]]
size_t __i)
const noexcept
632 if constexpr (__is_vectorizable_v<_FirstType>)
638 void _M_subscript_write([[maybe_unused]]
size_t __i, _Tp __y) noexcept
640 if constexpr (__is_vectorizable_v<_FirstType>)
643 first._M_set(__i, __y);
650 template <
typename _Tp,
typename _A0>
651 _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0>
652 __make_simd_tuple(simd<_Tp, _A0> __x0)
653 {
return {__data(__x0)}; }
655 template <
typename _Tp,
typename _A0,
typename... _As>
656 _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0, _As...>
657 __make_simd_tuple(
const simd<_Tp, _A0>& __x0,
const simd<_Tp, _As>&... __xs)
658 {
return {__data(__x0), __make_simd_tuple(__xs...)}; }
660 template <
typename _Tp,
typename _A0>
661 _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0>
662 __make_simd_tuple(
const typename _SimdTraits<_Tp, _A0>::_SimdMember& __arg0)
665 template <
typename _Tp,
typename _A0,
typename _A1,
typename... _Abis>
666 _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0, _A1, _Abis...>
668 const typename _SimdTraits<_Tp, _A0>::_SimdMember& __arg0,
669 const typename _SimdTraits<_Tp, _A1>::_SimdMember& __arg1,
670 const typename _SimdTraits<_Tp, _Abis>::_SimdMember&... __args)
671 {
return {__arg0, __make_simd_tuple<_Tp, _A1, _Abis...>(__arg1, __args...)}; }
674 template <
typename _Tp,
size_t _Np,
typename _V,
size_t _NV,
typename... _VX>
675 _GLIBCXX_SIMD_INTRINSIC constexpr __fixed_size_storage_t<_Tp, _Np>
676 __to_simd_tuple(
const array<_V, _NV>& __from,
const _VX... __fromX);
678 template <
typename _Tp,
size_t _Np,
680 typename _R = __fixed_size_storage_t<_Tp, _Np>,
typename _V0,
681 typename _V0VT = _VectorTraits<_V0>,
typename... _VX>
682 _GLIBCXX_SIMD_INTRINSIC _R constexpr __to_simd_tuple(
const _V0 __from0,
683 const _VX... __fromX)
685 static_assert(is_same_v<typename _V0VT::value_type, _Tp>);
686 static_assert(_Offset < _V0VT::_S_full_size);
687 using _R0 = __vector_type_t<_Tp, _R::_S_first_size>;
688 if constexpr (_R::_S_tuple_size == 1)
690 if constexpr (_Np == 1)
691 return _R{__from0[_Offset]};
692 else if constexpr (_Offset == 0 && _V0VT::_S_full_size >= _Np)
693 return _R{__intrin_bitcast<_R0>(__from0)};
694 else if constexpr (_Offset * 2 == _V0VT::_S_full_size
695 && _V0VT::_S_full_size / 2 >= _Np)
696 return _R{__intrin_bitcast<_R0>(__extract_part<1, 2>(__from0))};
697 else if constexpr (_Offset * 4 == _V0VT::_S_full_size
698 && _V0VT::_S_full_size / 4 >= _Np)
699 return _R{__intrin_bitcast<_R0>(__extract_part<1, 4>(__from0))};
701 __assert_unreachable<_Tp>();
705 if constexpr (1 == _R::_S_first_size)
707 if constexpr (_Offset + 1 < _V0VT::_S_full_size)
708 return _R{__from0[_Offset],
709 __to_simd_tuple<_Tp, _Np - 1, _Offset + 1>(__from0,
712 return _R{__from0[_Offset],
713 __to_simd_tuple<_Tp, _Np - 1, 0>(__fromX...)};
717 else if constexpr (_V0VT::_S_full_size == _R::_S_first_size
720 __to_simd_tuple<_Tp, _Np - _R::_S_first_size>(__fromX...)};
723 else if constexpr (_V0VT::_S_full_size > _R::_S_first_size
725 return _R{__intrin_bitcast<_R0>(__from0),
726 __to_simd_tuple<_Tp, _Np - _R::_S_first_size,
727 _R::_S_first_size>(__from0, __fromX...)};
731 else if constexpr (_Offset * 4 == _V0VT::_S_full_size
732 && _V0VT::_S_full_size >= 4 * _R::_S_first_size)
733 return _R{__intrin_bitcast<_R0>(__extract_part<2, 4>(__from0)),
734 __to_simd_tuple<_Tp, _Np - _R::_S_first_size,
735 _Offset + _R::_S_first_size>(__from0,
740 else if constexpr (_Offset * 2 == _V0VT::_S_full_size
741 && _V0VT::_S_full_size >= 4 * _R::_S_first_size)
742 return _R{__intrin_bitcast<_R0>(__extract_part<2, 4>(__from0)),
743 __to_simd_tuple<_Tp, _Np - _R::_S_first_size,
744 _Offset + _R::_S_first_size>(__from0,
748 else if constexpr (_Offset * 2 == _V0VT::_S_full_size
749 && _V0VT::_S_full_size / 2 >= _R::_S_first_size)
750 return _R{__intrin_bitcast<_R0>(__extract_part<1, 2>(__from0)),
751 __to_simd_tuple<_Tp, _Np - _R::_S_first_size, 0>(
756 __assert_unreachable<_Tp>();
760 template <
typename _Tp,
size_t _Np,
typename _V,
size_t _NV,
typename... _VX>
761 _GLIBCXX_SIMD_INTRINSIC constexpr __fixed_size_storage_t<_Tp, _Np>
762 __to_simd_tuple(
const array<_V, _NV>& __from,
const _VX... __fromX)
764 if constexpr (is_same_v<_Tp, _V>)
768 "An array of scalars must be the last argument to __to_simd_tuple");
769 return __call_with_subscripts(
771 make_index_sequence<_NV>(), [&](
const auto... __args) constexpr {
772 return __simd_tuple_concat(
773 _SimdTuple<_Tp, simd_abi::scalar>{__args}..., _SimdTuple<_Tp>());
777 return __call_with_subscripts(
779 make_index_sequence<_NV>(), [&](
const auto... __args) constexpr {
780 return __to_simd_tuple<_Tp, _Np>(__args..., __fromX...);
// Maps any index to _Tp; used to repeat one type per element of an index
// pack.
template <size_t, typename _Tp>
  using __to_tuple_helper = _Tp;
787 template <
typename _Tp,
typename _A0,
size_t _NOut,
size_t _Np,
789 _GLIBCXX_SIMD_INTRINSIC __fixed_size_storage_t<_Tp, _NOut>
790 __to_simd_tuple_impl(index_sequence<_Indexes...>,
791 const array<__vector_type_t<_Tp, simd_size_v<_Tp, _A0>>, _Np>& __args)
793 return __make_simd_tuple<_Tp, __to_tuple_helper<_Indexes, _A0>...>(
794 __args[_Indexes]...);
797 template <
typename _Tp,
typename _A0,
size_t _NOut,
size_t _Np,
798 typename _R = __fixed_size_storage_t<_Tp, _NOut>>
799 _GLIBCXX_SIMD_INTRINSIC _R
800 __to_simd_tuple_sized(
801 const array<__vector_type_t<_Tp, simd_size_v<_Tp, _A0>>, _Np>& __args)
803 static_assert(_Np * simd_size_v<_Tp, _A0> >= _NOut);
804 return __to_simd_tuple_impl<_Tp, _A0, _NOut>(
805 make_index_sequence<_R::_S_tuple_size>(), __args);
809 template <
typename _Tp>
810 _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp>
811 __optimize_simd_tuple(
const _SimdTuple<_Tp>)
814 template <
typename _Tp,
typename _Ap>
815 _GLIBCXX_SIMD_INTRINSIC
const _SimdTuple<_Tp, _Ap>&
816 __optimize_simd_tuple(
const _SimdTuple<_Tp, _Ap>& __x)
819 template <
typename _Tp,
typename _A0,
typename _A1,
typename... _Abis,
820 typename _R = __fixed_size_storage_t<
821 _Tp, _SimdTuple<_Tp, _A0, _A1, _Abis...>::_S_size()>>
822 _GLIBCXX_SIMD_INTRINSIC _R
823 __optimize_simd_tuple(
const _SimdTuple<_Tp, _A0, _A1, _Abis...>& __x)
825 using _Tup = _SimdTuple<_Tp, _A0, _A1, _Abis...>;
826 if constexpr (is_same_v<_R, _Tup>)
828 else if constexpr (is_same_v<
typename _R::_FirstType,
829 typename _Tup::_FirstType>)
830 return {__x.first, __optimize_simd_tuple(__x.second)};
831 else if constexpr (__is_scalar_abi<_A0>()
832 || _A0::template _S_is_partial<_Tp>)
833 return {__generate_from_n_evaluations<_R::_S_first_size,
834 typename _R::_FirstType>(
835 [&](
auto __i) {
return __x[__i]; }),
836 __optimize_simd_tuple(
837 __simd_tuple_pop_front<_R::_S_first_size>(__x))};
838 else if constexpr (is_same_v<_A0, _A1>
839 && _R::_S_first_size == simd_size_v<_Tp, _A0> + simd_size_v<_Tp, _A1>)
840 return {__concat(__x.template _M_at<0>(), __x.template _M_at<1>()),
841 __optimize_simd_tuple(__x.second.second)};
842 else if constexpr (
sizeof...(_Abis) >= 2
843 && _R::_S_first_size == (4 * simd_size_v<_Tp, _A0>)
844 && simd_size_v<_Tp, _A0> == __simd_tuple_element_t<
845 (
sizeof...(_Abis) >= 2 ? 3 : 0), _Tup>::
size())
847 __concat(__concat(__x.template _M_at<0>(), __x.template _M_at<1>()),
848 __concat(__x.template _M_at<2>(), __x.template _M_at<3>())),
849 __optimize_simd_tuple(__x.second.second.second.second)};
852 static_assert(
sizeof(_R) ==
sizeof(__x));
854 __builtin_memcpy(__r._M_as_charptr(), __x._M_as_charptr(),
855 sizeof(_Tp) * _R::_S_size());
861 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _Fp>
862 _GLIBCXX_SIMD_INTRINSIC constexpr
void
863 __for_each(
const _SimdTuple<_Tp, _A0>& __t, _Fp&& __fun)
864 {
static_cast<_Fp&&
>(__fun)(__make_meta<_Offset>(__t), __t.first); }
866 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _A1,
867 typename... _As,
typename _Fp>
868 _GLIBCXX_SIMD_INTRINSIC constexpr
void
869 __for_each(
const _SimdTuple<_Tp, _A0, _A1, _As...>& __t, _Fp&& __fun)
871 __fun(__make_meta<_Offset>(__t), __t.first);
872 __for_each<_Offset + simd_size<_Tp, _A0>::value>(__t.second,
873 static_cast<_Fp&&
>(__fun));
877 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _Fp>
878 _GLIBCXX_SIMD_INTRINSIC constexpr
void
879 __for_each(_SimdTuple<_Tp, _A0>& __t, _Fp&& __fun)
880 {
static_cast<_Fp&&
>(__fun)(__make_meta<_Offset>(__t), __t.first); }
882 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _A1,
883 typename... _As,
typename _Fp>
884 _GLIBCXX_SIMD_INTRINSIC constexpr
void
885 __for_each(_SimdTuple<_Tp, _A0, _A1, _As...>& __t, _Fp&& __fun)
887 __fun(__make_meta<_Offset>(__t), __t.first);
888 __for_each<_Offset + simd_size<_Tp, _A0>::value>(__t.second,
889 static_cast<_Fp&&
>(__fun));
893 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _Fp>
894 _GLIBCXX_SIMD_INTRINSIC constexpr
void
895 __for_each(_SimdTuple<_Tp, _A0>& __a,
const _SimdTuple<_Tp, _A0>& __b,
898 static_cast<_Fp&&
>(__fun)(__make_meta<_Offset>(__a), __a.first, __b.first);
901 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _A1,
902 typename... _As,
typename _Fp>
903 _GLIBCXX_SIMD_INTRINSIC constexpr
void
904 __for_each(_SimdTuple<_Tp, _A0, _A1, _As...>& __a,
905 const _SimdTuple<_Tp, _A0, _A1, _As...>& __b, _Fp&& __fun)
907 __fun(__make_meta<_Offset>(__a), __a.first, __b.first);
908 __for_each<_Offset + simd_size<_Tp, _A0>::value>(__a.second, __b.second,
909 static_cast<_Fp&&
>(__fun));
913 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _Fp>
914 _GLIBCXX_SIMD_INTRINSIC constexpr
void
915 __for_each(
const _SimdTuple<_Tp, _A0>& __a,
const _SimdTuple<_Tp, _A0>& __b,
918 static_cast<_Fp&&
>(__fun)(__make_meta<_Offset>(__a), __a.first, __b.first);
921 template <
size_t _Offset = 0,
typename _Tp,
typename _A0,
typename _A1,
922 typename... _As,
typename _Fp>
923 _GLIBCXX_SIMD_INTRINSIC constexpr
void
924 __for_each(
const _SimdTuple<_Tp, _A0, _A1, _As...>& __a,
925 const _SimdTuple<_Tp, _A0, _A1, _As...>& __b, _Fp&& __fun)
927 __fun(__make_meta<_Offset>(__a), __a.first, __b.first);
928 __for_each<_Offset + simd_size<_Tp, _A0>::value>(__a.second, __b.second,
929 static_cast<_Fp&&
>(__fun));
934 template <
int _Index,
int _Total,
int _Combine,
typename _Tp,
typename _A0,
936 _GLIBCXX_SIMD_INTRINSIC
auto
937 __extract_part(
const _SimdTuple<_Tp, _A0, _As...>& __x)
943 using _Tuple = _SimdTuple<_Tp, _A0, _As...>;
944 static_assert(_Index + _Combine <= _Total && _Index >= 0 && _Total >= 1);
945 constexpr
size_t _Np = _Tuple::_S_size();
946 static_assert(_Np >= _Total && _Np % _Total == 0);
947 constexpr
size_t __values_per_part = _Np / _Total;
948 [[maybe_unused]] constexpr
size_t __values_to_skip
949 = _Index * __values_per_part;
950 constexpr
size_t __return_size = __values_per_part * _Combine;
951 using _RetAbi = simd_abi::deduce_t<_Tp, __return_size>;
954 if constexpr (_Index == 0 && _Tuple::_S_first_size == __return_size)
955 return __x.first._M_data;
956 else if constexpr (_Index == 0 && _Total == _Combine)
958 else if constexpr (_Index == 0 && _Tuple::_S_first_size >= __return_size)
959 return __intrin_bitcast<__vector_type_t<_Tp, __return_size>>(
960 __as_vector(__x.first));
963 else if constexpr (__values_to_skip >= _Tuple::_S_first_size)
965 if constexpr (_Tuple::_S_first_size % __values_per_part == 0)
967 constexpr
int __parts_in_first
968 = _Tuple::_S_first_size / __values_per_part;
969 return __extract_part<_Index - __parts_in_first,
970 _Total - __parts_in_first, _Combine>(
974 return __extract_part<__values_to_skip - _Tuple::_S_first_size,
975 _Np - _Tuple::_S_first_size, __return_size>(
980 else if constexpr (__return_size > _Tuple::_S_first_size - __values_to_skip)
982 #ifdef _GLIBCXX_SIMD_USE_ALIASING_LOADS
983 const __may_alias<_Tp>*
const element_ptr
984 =
reinterpret_cast<const __may_alias<_Tp>*
>(&__x) + __values_to_skip;
985 return __as_vector(simd<_Tp, _RetAbi>(element_ptr, element_aligned));
987 [[maybe_unused]] constexpr
size_t __offset = __values_to_skip;
988 return __as_vector(simd<_Tp, _RetAbi>([&](
auto __i) constexpr {
989 constexpr _SizeConstant<__i + __offset> __k;
996 else if constexpr (_Tuple::_S_first_size % __values_per_part == 0)
997 return __extract_part<_Index, _Tuple::_S_first_size / __values_per_part,
998 _Combine>(__x.first);
1000 return __extract_part<__values_to_skip, _Tuple::_S_first_size,
1001 _Combine * __values_per_part>(__x.first);
1006 template <typename _Tp,
int _Np, typename _Tuple,
1007 typename _Next = simd<_Tp, _AllNativeAbis::_BestAbi<_Tp, _Np>>,
1008 int _Remain = _Np -
int(_Next::
size())>
1009 struct __fixed_size_storage_builder;
1011 template <typename _Tp,
int _Np>
1012 struct __fixed_size_storage
1013 : public __fixed_size_storage_builder<_Tp, _Np, _SimdTuple<_Tp>> {};
1015 template <
typename _Tp,
int _Np,
typename... _As,
typename _Next>
1016 struct __fixed_size_storage_builder<_Tp, _Np, _SimdTuple<_Tp, _As...>, _Next,
1018 {
using type = _SimdTuple<_Tp, _As...,
typename _Next::abi_type>; };
1020 template <
typename _Tp,
int _Np,
typename... _As,
typename _Next,
int _Remain>
1021 struct __fixed_size_storage_builder<_Tp, _Np, _SimdTuple<_Tp, _As...>, _Next,
1024 using type =
typename __fixed_size_storage_builder<
1025 _Tp, _Remain, _SimdTuple<_Tp, _As...,
typename _Next::abi_type>>::type;
1030 template <
typename _Tp>
1033 template <
size_t _I0,
size_t... _Is>
1038 template <
size_t _First,
size_t _Add>
1039 using _Prepend =
index_sequence<_First, _I0 + _Add, (_Is + _Add)...>;
// _AbisInSimdTuple {{{
// For each run of equal ABI tags in a _SimdTuple, computes how many members
// the run contains (_Counts) and the member index where it begins (_Begins).
template <typename _Tp>
  struct _AbisInSimdTuple;
1045 template <
typename _Tp>
1046 struct _AbisInSimdTuple<_SimdTuple<_Tp>>
1048 using _Counts = index_sequence<0>;
1049 using _Begins = index_sequence<0>;
1052 template <
typename _Tp,
typename _Ap>
1053 struct _AbisInSimdTuple<_SimdTuple<_Tp, _Ap>>
1055 using _Counts = index_sequence<1>;
1056 using _Begins = index_sequence<0>;
1059 template <
typename _Tp,
typename _A0,
typename... _As>
1060 struct _AbisInSimdTuple<_SimdTuple<_Tp, _A0, _A0, _As...>>
1062 using _Counts =
typename _SeqOp<
typename _AbisInSimdTuple<
1063 _SimdTuple<_Tp, _A0, _As...>>::_Counts>::_FirstPlusOne;
1064 using _Begins =
typename _SeqOp<
typename _AbisInSimdTuple<
1065 _SimdTuple<_Tp, _A0, _As...>>::_Begins>::_NotFirstPlusOne;
1068 template <
typename _Tp,
typename _A0,
typename _A1,
typename... _As>
1069 struct _AbisInSimdTuple<_SimdTuple<_Tp, _A0, _A1, _As...>>
1071 using _Counts =
typename _SeqOp<
typename _AbisInSimdTuple<
1072 _SimdTuple<_Tp, _A1, _As...>>::_Counts>::template _Prepend<1, 0>;
1073 using _Begins =
typename _SeqOp<
typename _AbisInSimdTuple<
1074 _SimdTuple<_Tp, _A1, _As...>>::_Begins>::template _Prepend<0, 1>;
1079 template <
typename _Tp,
bool = is_arithmetic_v<__remove_cvref_t<_Tp>>>
1080 struct __autocvt_to_simd
1083 using _TT = __remove_cvref_t<_Tp>;
1090 static_assert(is_lvalue_reference<_Tp>::value,
"");
1091 static_assert(!is_const<_Tp>::value,
"");
1097 static_assert(is_lvalue_reference<_Tp>::value,
"");
1098 static_assert(!is_const<_Tp>::value,
"");
1102 constexpr
inline __autocvt_to_simd(_Tp dd) : _M_data(dd) {}
1104 template <
typename _Abi>
1105 operator simd<typename _TT::value_type, _Abi>()
1106 {
return {__private_init, _M_data}; }
1108 template <
typename _Abi>
1109 operator simd<typename _TT::value_type, _Abi>&()
1111 return *
reinterpret_cast<simd<typename _TT::value_type, _Abi>*
>(
1115 template <
typename _Abi>
1116 operator simd<typename _TT::value_type, _Abi>*()
1118 return reinterpret_cast<simd<typename _TT::value_type, _Abi>*
>(
1123 template <
typename _Tp>
1124 __autocvt_to_simd(_Tp &&) -> __autocvt_to_simd<_Tp>;
1126 template <
typename _Tp>
1127 struct __autocvt_to_simd<_Tp, true>
1129 using _TT = __remove_cvref_t<_Tp>;
1131 fixed_size_simd<_TT, 1> _M_fd;
1133 constexpr
inline __autocvt_to_simd(_Tp dd) : _M_data(dd), _M_fd(_M_data) {}
1135 ~__autocvt_to_simd()
1136 { _M_data = __data(_M_fd).first; }
1138 operator fixed_size_simd<_TT, 1>()
1141 operator fixed_size_simd<_TT, 1> &()
1143 static_assert(is_lvalue_reference<_Tp>::value,
"");
1144 static_assert(!is_const<_Tp>::value,
"");
1148 operator fixed_size_simd<_TT, 1> *()
1150 static_assert(is_lvalue_reference<_Tp>::value,
"");
1151 static_assert(!is_const<_Tp>::value,
"");
// Forward declarations of the fixed-size ABI implementation classes
// referenced by simd_abi::_Fixed below and defined later in this file.
struct _CommonImplFixedSize;

template <int _Np> struct _SimdImplFixedSize;

template <int _Np> struct _MaskImplFixedSize;
1163 struct simd_abi::_Fixed
1165 template <
typename _Tp>
static constexpr
size_t _S_size = _Np;
1166 template <
typename _Tp>
static constexpr
size_t _S_full_size = _Np;
1168 struct _IsValidAbiTag :
public __bool_constant<(_Np > 0)> {};
1170 template <
typename _Tp>
1171 struct _IsValidSizeFor
1172 : __bool_constant<(_Np <= simd_abi::max_fixed_size<_Tp>)> {};
1174 template <typename _Tp>
1175 struct _IsValid : conjunction<_IsValidAbiTag, __is_vectorizable<_Tp>,
1176 _IsValidSizeFor<_Tp>> {};
1178 template <typename _Tp>
1179 static constexpr bool _S_is_valid_v = _IsValid<_Tp>::value;
1183 _GLIBCXX_SIMD_INTRINSIC static constexpr _SanitizedBitMask<_Np>
1184 _S_masked(_BitMask<_Np> __x)
1185 { return __x._M_sanitized(); }
1187 _GLIBCXX_SIMD_INTRINSIC static constexpr _SanitizedBitMask<_Np>
1188 _S_masked(_SanitizedBitMask<_Np> __x)
1193 using _CommonImpl = _CommonImplFixedSize;
1194 using _SimdImpl = _SimdImplFixedSize<_Np>;
1195 using _MaskImpl = _MaskImplFixedSize<_Np>;
1199 template <typename _Tp, bool = _S_is_valid_v<_Tp>>
1200 struct __traits : _InvalidTraits {};
1202 template <typename _Tp>
1203 struct __traits<_Tp, true>
1205 using _IsValid = true_type;
1206 using _SimdImpl = _SimdImplFixedSize<_Np>;
1207 using _MaskImpl = _MaskImplFixedSize<_Np>;
1210 using _SimdMember = __fixed_size_storage_t<_Tp, _Np>;
1211 using _MaskMember = _SanitizedBitMask<_Np>;
1213 static constexpr size_t _S_simd_align
1214 = std::__bit_ceil(_Np * sizeof(_Tp));
1216 static constexpr size_t _S_mask_align = alignof(_MaskMember);
1224 _SimdBase(const _SimdBase&) {}
1225 _SimdBase() = default;
1227 explicit operator const _SimdMember &() const
1228 { return static_cast<const simd<_Tp, _Fixed>*>(this)->_M_data; }
1230 explicit operator array<_Tp, _Np>() const
1232 array<_Tp, _Np> __r;
1234 static_assert(
sizeof(__r) <=
sizeof(_SimdMember),
"");
1235 __builtin_memcpy(__r.data(), &
static_cast<const _SimdMember&
>(*
this),
1244 struct _MaskBase {};
1248 struct _SimdCastType
1250 _SimdCastType(
const array<_Tp, _Np>&);
1251 _SimdCastType(
const _SimdMember& dd) : _M_data(dd) {}
1252 explicit operator const _SimdMember &()
const {
return _M_data; }
1255 const _SimdMember& _M_data;
1262 _MaskCastType() =
delete;
1271 struct _CommonImplFixedSize
1274 template <
typename _Tp,
typename... _As>
1275 _GLIBCXX_SIMD_INTRINSIC
static void
1276 _S_store(
const _SimdTuple<_Tp, _As...>& __x,
void* __addr)
1278 constexpr
size_t _Np = _SimdTuple<_Tp, _As...>::_S_size();
1279 __builtin_memcpy(__addr, &__x, _Np *
sizeof(_Tp));
1290 struct _SimdImplFixedSize
1293 using _MaskMember = _SanitizedBitMask<_Np>;
1295 template <
typename _Tp>
1296 using _SimdMember = __fixed_size_storage_t<_Tp, _Np>;
1298 template <
typename _Tp>
1299 static constexpr
size_t _S_tuple_size = _SimdMember<_Tp>::_S_tuple_size;
1301 template <
typename _Tp>
1302 using _Simd = simd<_Tp, simd_abi::fixed_size<_Np>>;
1304 template <
typename _Tp>
1305 using _TypeTag = _Tp*;
1308 template <
typename _Tp>
1309 static constexpr
inline _SimdMember<_Tp> _S_broadcast(_Tp __x) noexcept
1311 return _SimdMember<_Tp>::_S_generate([&](
auto __meta) constexpr {
1312 return __meta._S_broadcast(__x);
1317 template <
typename _Fp,
typename _Tp>
1318 static constexpr
inline _SimdMember<_Tp> _S_generator(_Fp&& __gen,
1321 return _SimdMember<_Tp>::_S_generate([&__gen](
auto __meta) constexpr {
1322 return __meta._S_generator(
1323 [&](
auto __i) constexpr {
1324 return __i < _Np ? __gen(_SizeConstant<__meta._S_offset + __i>())
1332 template <
typename _Tp,
typename _Up>
1333 static inline _SimdMember<_Tp> _S_load(
const _Up* __mem,
1334 _TypeTag<_Tp>) noexcept
1336 return _SimdMember<_Tp>::_S_generate([&](
auto __meta) {
1337 return __meta._S_load(&__mem[__meta._S_offset], _TypeTag<_Tp>());
1342 template <
typename _Tp,
typename... _As,
typename _Up>
1343 static inline _SimdTuple<_Tp, _As...>
1344 _S_masked_load(
const _SimdTuple<_Tp, _As...>& __old,
1345 const _MaskMember __bits,
const _Up* __mem) noexcept
1347 auto __merge = __old;
1348 __for_each(__merge, [&](
auto __meta,
auto& __native) {
1349 if (__meta._S_submask(__bits).any())
1350 #pragma GCC diagnostic push
1355 #pragma GCC diagnostic ignored
"-Warray-bounds"
1357 = __meta._S_masked_load(__native, __meta._S_make_mask(__bits),
1358 __mem + __meta._S_offset);
1359 #pragma GCC diagnostic pop
1365 template <
typename _Tp,
typename _Up>
1366 static inline void _S_store(
const _SimdMember<_Tp>& __v, _Up* __mem,
1367 _TypeTag<_Tp>) noexcept
1369 __for_each(__v, [&](
auto __meta,
auto __native) {
1370 __meta._S_store(__native, &__mem[__meta._S_offset], _TypeTag<_Tp>());
1375 template <
typename _Tp,
typename... _As,
typename _Up>
1376 static inline void _S_masked_store(
const _SimdTuple<_Tp, _As...>& __v,
1378 const _MaskMember __bits) noexcept
1380 __for_each(__v, [&](
auto __meta,
auto __native) {
1381 if (__meta._S_submask(__bits).any())
1382 #pragma GCC diagnostic push
1387 #pragma GCC diagnostic ignored
"-Warray-bounds"
1388 __meta._S_masked_store(__native, __mem + __meta._S_offset,
1389 __meta._S_make_mask(__bits));
1390 #pragma GCC diagnostic pop
1395 template <
typename _Tp,
typename... _As>
1396 static inline _MaskMember
1397 _S_negate(
const _SimdTuple<_Tp, _As...>& __x) noexcept
1399 _MaskMember __bits = 0;
1401 __x, [&__bits](
auto __meta,
auto __native) constexpr {
1403 |= __meta._S_mask_to_shifted_ullong(__meta._S_negate(__native));
1409 template <
typename _Tp,
typename _BinaryOperation>
1410 static constexpr
inline _Tp _S_reduce(
const _Simd<_Tp>& __x,
1411 const _BinaryOperation& __binary_op)
1413 using _Tup = _SimdMember<_Tp>;
1414 const _Tup& __tup = __data(__x);
1415 if constexpr (_Tup::_S_tuple_size == 1)
1416 return _Tup::_FirstAbi::_SimdImpl::_S_reduce(
1417 __tup.template _M_simd_at<0>(), __binary_op);
1418 else if constexpr (_Tup::_S_tuple_size == 2 && _Tup::_S_size() > 2
1419 && _Tup::_SecondType::_S_size() == 1)
1421 return __binary_op(simd<_Tp, simd_abi::scalar>(
1422 reduce(__tup.template _M_simd_at<0>(),
1424 __tup.template _M_simd_at<1>())[0];
1426 else if constexpr (_Tup::_S_tuple_size == 2 && _Tup::_S_size() > 4
1427 && _Tup::_SecondType::_S_size() == 2)
1430 simd<_Tp, simd_abi::scalar>(
1431 reduce(__tup.template _M_simd_at<0>(), __binary_op)),
1432 simd<_Tp, simd_abi::scalar>(
1433 reduce(__tup.template _M_simd_at<1>(), __binary_op)))[0];
1437 const auto& __x2 = __call_with_n_evaluations<
1438 __div_roundup(_Tup::_S_tuple_size, 2)>(
1439 [](
auto __first_simd,
auto... __remaining) {
1440 if constexpr (
sizeof...(__remaining) == 0)
1441 return __first_simd;
1446 typename decltype(__first_simd)::abi_type,
1447 typename decltype(__remaining)::abi_type...>;
1448 return fixed_size_simd<_Tp, _Tup2::_S_size()>(
1450 __make_simd_tuple(__first_simd, __remaining...));
1454 auto __left = __tup.template _M_simd_at<2 * __i>();
1455 if constexpr (2 * __i + 1 == _Tup::_S_tuple_size)
1459 auto __right = __tup.template _M_simd_at<2 * __i + 1>();
1460 using _LT = decltype(__left);
1461 using _RT = decltype(__right);
1463 return __binary_op(__left, __right);
1466 _GLIBCXX_SIMD_USE_CONSTEXPR_API
1467 typename _LT::mask_type __k(
1469 [](
auto __j) constexpr {
return __j <
_RT::size(); });
1470 _LT __ext_right = __left;
1471 where(__k, __ext_right)
1472 = __proposed::resizing_simd_cast<_LT>(__right);
1473 where(__k, __left) = __binary_op(__left, __ext_right);
1478 return reduce(__x2, __binary_op);
1483 template <
typename _Tp,
typename... _As>
1484 static inline constexpr _SimdTuple<_Tp, _As...>
1485 _S_min(
const _SimdTuple<_Tp, _As...>& __a,
1486 const _SimdTuple<_Tp, _As...>& __b)
1488 return __a._M_apply_per_chunk(
1489 [](
auto __impl,
auto __aa,
auto __bb) constexpr {
1490 return __impl._S_min(__aa, __bb);
1495 template <
typename _Tp,
typename... _As>
1496 static inline constexpr _SimdTuple<_Tp, _As...>
1497 _S_max(
const _SimdTuple<_Tp, _As...>& __a,
1498 const _SimdTuple<_Tp, _As...>& __b)
1500 return __a._M_apply_per_chunk(
1501 [](
auto __impl,
auto __aa,
auto __bb) constexpr {
1502 return __impl._S_max(__aa, __bb);
1508 template <
typename _Tp,
typename... _As>
1509 static inline constexpr _SimdTuple<_Tp, _As...>
1510 _S_complement(
const _SimdTuple<_Tp, _As...>& __x) noexcept
1512 return __x._M_apply_per_chunk([](
auto __impl,
auto __xx) constexpr {
1513 return __impl._S_complement(__xx);
1518 template <
typename _Tp,
typename... _As>
1519 static inline constexpr _SimdTuple<_Tp, _As...>
1520 _S_unary_minus(
const _SimdTuple<_Tp, _As...>& __x) noexcept
1522 return __x._M_apply_per_chunk([](
auto __impl,
auto __xx) constexpr {
1523 return __impl._S_unary_minus(__xx);
1529 #define _GLIBCXX_SIMD_FIXED_OP(name_, op_) \
1530 template <typename _Tp, typename... _As> \
1531 static inline constexpr _SimdTuple<_Tp, _As...> name_( \
1532 const _SimdTuple<_Tp, _As...> __x, const _SimdTuple<_Tp, _As...> __y) \
1534 return __x._M_apply_per_chunk( \
1535 [](auto __impl, auto __xx, auto __yy) constexpr { \
1536 return __impl.name_(__xx, __yy); \
1541 _GLIBCXX_SIMD_FIXED_OP(_S_plus, +)
1542 _GLIBCXX_SIMD_FIXED_OP(_S_minus, -)
1543 _GLIBCXX_SIMD_FIXED_OP(_S_multiplies, *)
1544 _GLIBCXX_SIMD_FIXED_OP(_S_divides, /)
1545 _GLIBCXX_SIMD_FIXED_OP(_S_modulus, %)
1546 _GLIBCXX_SIMD_FIXED_OP(_S_bit_and, &)
1547 _GLIBCXX_SIMD_FIXED_OP(_S_bit_or, |)
1548 _GLIBCXX_SIMD_FIXED_OP(_S_bit_xor, ^)
1549 _GLIBCXX_SIMD_FIXED_OP(_S_bit_shift_left, <<)
1550 _GLIBCXX_SIMD_FIXED_OP(_S_bit_shift_right, >>)
1551 #undef _GLIBCXX_SIMD_FIXED_OP
1553 template <
typename _Tp,
typename... _As>
1554 static inline constexpr _SimdTuple<_Tp, _As...>
1555 _S_bit_shift_left(
const _SimdTuple<_Tp, _As...>& __x,
int __y)
1557 return __x._M_apply_per_chunk([__y](
auto __impl,
auto __xx) constexpr {
1558 return __impl._S_bit_shift_left(__xx, __y);
1562 template <
typename _Tp,
typename... _As>
1563 static inline constexpr _SimdTuple<_Tp, _As...>
1564 _S_bit_shift_right(
const _SimdTuple<_Tp, _As...>& __x,
int __y)
1566 return __x._M_apply_per_chunk([__y](
auto __impl,
auto __xx) constexpr {
1567 return __impl._S_bit_shift_right(__xx, __y);
1572 #define _GLIBCXX_SIMD_APPLY_ON_TUPLE(_RetTp, __name) \
1573 template <typename _Tp, typename... _As, typename... _More> \
1574 static inline __fixed_size_storage_t<_RetTp, _Np> \
1575 _S_##__name(const _SimdTuple<_Tp, _As...>& __x, \
1576 const _More&... __more) \
1578 if constexpr (sizeof...(_More) == 0) \
1580 if constexpr (is_same_v<_Tp, _RetTp>) \
1581 return __x._M_apply_per_chunk( \
1582 [](auto __impl, auto __xx) constexpr { \
1583 using _V = typename decltype(__impl)::simd_type; \
1584 return __data(__name(_V(__private_init, __xx))); \
1587 return __optimize_simd_tuple( \
1588 __x.template _M_apply_r<_RetTp>([](auto __impl, auto __xx) { \
1589 return __impl._S_##__name(__xx); \
1592 else if constexpr ( \
1595 _RetTp> && (... && is_same_v<_SimdTuple<_Tp, _As...>, _More>) ) \
1596 return __x._M_apply_per_chunk( \
1597 [](auto __impl, auto __xx, auto... __pack) constexpr { \
1598 using _V = typename decltype(__impl)::simd_type; \
1599 return __data(__name(_V(__private_init, __xx), \
1600 _V(__private_init, __pack)...)); \
1603 else if constexpr (is_same_v<_Tp, _RetTp>) \
1604 return __x._M_apply_per_chunk( \
1605 [](auto __impl, auto __xx, auto... __pack) constexpr { \
1606 using _V = typename decltype(__impl)::simd_type; \
1607 return __data(__name(_V(__private_init, __xx), \
1608 __autocvt_to_simd(__pack)...)); \
1612 __assert_unreachable<_Tp>(); \
1615 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
acos)
1616 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
asin)
1617 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
atan)
1618 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, atan2)
1619 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
cos)
1620 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
sin)
1621 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
tan)
1622 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
acosh)
1623 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
asinh)
1624 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
atanh)
1625 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
cosh)
1626 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
sinh)
1627 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
tanh)
1628 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
exp)
1629 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, exp2)
1630 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, expm1)
1631 _GLIBCXX_SIMD_APPLY_ON_TUPLE(
int, ilogb)
1632 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
log)
1633 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
log10)
1634 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, log1p)
1635 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, log2)
1636 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, logb)
1638 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
1640 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, scalbln)
1641 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, cbrt)
1642 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, abs)
1643 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
fabs)
1644 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
pow)
1645 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp,
sqrt)
1646 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, erf)
1647 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, erfc)
1648 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, lgamma)
1649 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, tgamma)
1650 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, trunc)
1651 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, ceil)
1652 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, floor)
1653 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, nearbyint)
1655 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, rint)
1656 _GLIBCXX_SIMD_APPLY_ON_TUPLE(
long, lrint)
1657 _GLIBCXX_SIMD_APPLY_ON_TUPLE(
long long, llrint)
1659 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, round)
1660 _GLIBCXX_SIMD_APPLY_ON_TUPLE(
long, lround)
1661 _GLIBCXX_SIMD_APPLY_ON_TUPLE(
long long, llround)
1663 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, ldexp)
1664 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fmod)
1665 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, remainder)
1667 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, nextafter)
1668 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fdim)
1669 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fmax)
1670 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fmin)
1671 _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fma)
1672 _GLIBCXX_SIMD_APPLY_ON_TUPLE(
int, fpclassify)
1673 #undef _GLIBCXX_SIMD_APPLY_ON_TUPLE
1675 template <
typename _Tp,
typename... _Abis>
1676 static _SimdTuple<_Tp, _Abis...> _S_remquo(
1677 const _SimdTuple<_Tp, _Abis...>& __x,
1678 const _SimdTuple<_Tp, _Abis...>& __y,
1679 __fixed_size_storage_t<
int, _SimdTuple<_Tp, _Abis...>::_S_size()>* __z)
1681 return __x._M_apply_per_chunk(
1682 [](
auto __impl,
const auto __xx,
const auto __yy,
auto& __zz) {
1683 return __impl._S_remquo(__xx, __yy, &__zz);
1688 template <
typename _Tp,
typename... _As>
1689 static inline _SimdTuple<_Tp, _As...>
1690 _S_frexp(
const _SimdTuple<_Tp, _As...>& __x,
1691 __fixed_size_storage_t<int, _Np>& __exp) noexcept
1693 return __x._M_apply_per_chunk(
1694 [](
auto __impl,
const auto& __a,
auto& __b) {
1696 frexp(
typename decltype(__impl)::simd_type(__private_init, __a),
1697 __autocvt_to_simd(__b)));
1702 #define _GLIBCXX_SIMD_TEST_ON_TUPLE_(name_) \
1703 template <typename _Tp, typename... _As> \
1704 static inline _MaskMember \
1705 _S_##name_(const _SimdTuple<_Tp, _As...>& __x) noexcept \
1707 return _M_test([](auto __impl, \
1708 auto __xx) { return __impl._S_##name_(__xx); }, \
1712 _GLIBCXX_SIMD_TEST_ON_TUPLE_(isinf)
1713 _GLIBCXX_SIMD_TEST_ON_TUPLE_(isfinite)
1714 _GLIBCXX_SIMD_TEST_ON_TUPLE_(isnan)
1715 _GLIBCXX_SIMD_TEST_ON_TUPLE_(isnormal)
1716 _GLIBCXX_SIMD_TEST_ON_TUPLE_(signbit)
1717 #undef _GLIBCXX_SIMD_TEST_ON_TUPLE_
1720 template <
typename... _Ts>
1721 _GLIBCXX_SIMD_INTRINSIC
static constexpr
void
1722 _S_increment(_SimdTuple<_Ts...>& __x)
1725 __x, [](
auto __meta,
auto& native) constexpr {
1726 __meta._S_increment(native);
1730 template <
typename... _Ts>
1731 _GLIBCXX_SIMD_INTRINSIC
static constexpr
void
1732 _S_decrement(_SimdTuple<_Ts...>& __x)
1735 __x, [](
auto __meta,
auto& native) constexpr {
1736 __meta._S_decrement(native);
1741 #define _GLIBCXX_SIMD_CMP_OPERATIONS(__cmp) \
1742 template <typename _Tp, typename... _As> \
1743 _GLIBCXX_SIMD_INTRINSIC constexpr static _MaskMember \
1744 __cmp(const _SimdTuple<_Tp, _As...>& __x, \
1745 const _SimdTuple<_Tp, _As...>& __y) \
1748 [](auto __impl, auto __xx, auto __yy) constexpr { \
1749 return __impl.__cmp(__xx, __yy); \
1754 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_equal_to)
1755 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_not_equal_to)
1756 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_less)
1757 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_less_equal)
1758 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isless)
1759 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_islessequal)
1760 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isgreater)
1761 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isgreaterequal)
1762 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_islessgreater)
1763 _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isunordered)
1764 #undef _GLIBCXX_SIMD_CMP_OPERATIONS
1767 template <
typename _Tp,
typename... _As,
typename _Up>
1768 _GLIBCXX_SIMD_INTRINSIC
static void _S_set(_SimdTuple<_Tp, _As...>& __v,
1769 int __i, _Up&& __x) noexcept
1770 { __v._M_set(__i,
static_cast<_Up&&
>(__x)); }
1773 template <
typename _Tp,
typename... _As>
1774 _GLIBCXX_SIMD_INTRINSIC
static void
1775 _S_masked_assign(
const _MaskMember __bits, _SimdTuple<_Tp, _As...>& __lhs,
1776 const __type_identity_t<_SimdTuple<_Tp, _As...>>& __rhs)
1780 [&](
auto __meta,
auto& __native_lhs,
auto __native_rhs) constexpr {
1781 __meta._S_masked_assign(__meta._S_make_mask(__bits), __native_lhs,
1788 template <
typename _Tp,
typename... _As>
1789 _GLIBCXX_SIMD_INTRINSIC
static void
1790 _S_masked_assign(
const _MaskMember __bits, _SimdTuple<_Tp, _As...>& __lhs,
1791 const __type_identity_t<_Tp> __rhs)
1794 __lhs, [&](
auto __meta,
auto& __native_lhs) constexpr {
1795 __meta._S_masked_assign(__meta._S_make_mask(__bits), __native_lhs,
1801 template <
typename _Op,
typename _Tp,
typename... _As>
1802 static inline void _S_masked_cassign(
const _MaskMember __bits,
1803 _SimdTuple<_Tp, _As...>& __lhs,
1804 const _SimdTuple<_Tp, _As...>& __rhs,
1809 [&](
auto __meta,
auto& __native_lhs,
auto __native_rhs) constexpr {
1810 __meta.template _S_masked_cassign(__meta._S_make_mask(__bits),
1811 __native_lhs, __native_rhs, __op);
1817 template <
typename _Op,
typename _Tp,
typename... _As>
1818 static inline void _S_masked_cassign(
const _MaskMember __bits,
1819 _SimdTuple<_Tp, _As...>& __lhs,
1820 const _Tp& __rhs, _Op __op)
1823 __lhs, [&](
auto __meta,
auto& __native_lhs) constexpr {
1824 __meta.template _S_masked_cassign(__meta._S_make_mask(__bits),
1825 __native_lhs, __rhs, __op);
1830 template <
template <
typename>
class _Op,
typename _Tp,
typename... _As>
1831 static inline _SimdTuple<_Tp, _As...>
1832 _S_masked_unary(
const _MaskMember __bits,
1833 const _SimdTuple<_Tp, _As...> __v)
1835 return __v._M_apply_wrapped([&__bits](
auto __meta,
1836 auto __native) constexpr {
1837 return __meta.template _S_masked_unary<_Op>(__meta._S_make_mask(
1848 struct _MaskImplFixedSize
1851 sizeof(_ULLong) * __CHAR_BIT__ >= _Np,
1852 "The fixed_size implementation relies on one _ULLong being able to store "
1853 "all boolean elements.");
1856 using _Abi = simd_abi::fixed_size<_Np>;
1858 using _MaskMember = _SanitizedBitMask<_Np>;
1860 template <
typename _Tp>
1861 using _FirstAbi =
typename __fixed_size_storage_t<_Tp, _Np>::_FirstAbi;
1863 template <
typename _Tp>
1864 using _TypeTag = _Tp*;
1869 _GLIBCXX_SIMD_INTRINSIC
static constexpr _MaskMember
1870 _S_broadcast(
bool __x)
1871 {
return __x ? ~_MaskMember() : _MaskMember(); }
1876 _GLIBCXX_SIMD_INTRINSIC
static constexpr _MaskMember
1877 _S_load(
const bool* __mem)
1879 using _Ip = __int_for_sizeof_t<bool>;
1883 const simd<_Ip, _Abi> __bools(
reinterpret_cast<const __may_alias<_Ip>*
>(
1886 return __data(__bools != 0);
1891 template <
bool _Sanitized>
1892 _GLIBCXX_SIMD_INTRINSIC
static constexpr _SanitizedBitMask<_Np>
1893 _S_to_bits(_BitMask<_Np, _Sanitized> __x)
1895 if constexpr (_Sanitized)
1898 return __x._M_sanitized();
1903 template <
typename _Tp,
typename _Up,
typename _UAbi>
1904 _GLIBCXX_SIMD_INTRINSIC
static constexpr _MaskMember
1905 _S_convert(simd_mask<_Up, _UAbi> __x)
1907 return _UAbi::_MaskImpl::_S_to_bits(__data(__x))
1908 .template _M_extract<0, _Np>();
1913 template <
typename _Tp>
1914 _GLIBCXX_SIMD_INTRINSIC
static _MaskMember
1915 _S_from_bitmask(_MaskMember __bits, _TypeTag<_Tp>) noexcept
1919 static inline _MaskMember _S_load(
const bool* __mem) noexcept
1924 using _Vs = __fixed_size_storage_t<_UChar, _Np>;
1925 __for_each(_Vs{}, [&](
auto __meta,
auto) {
1926 __r |= __meta._S_mask_to_shifted_ullong(
1927 __meta._S_mask_impl._S_load(&__mem[__meta._S_offset],
1928 _SizeConstant<__meta._S_size()>()));
1934 static inline _MaskMember _S_masked_load(_MaskMember __merge,
1936 const bool* __mem) noexcept
1938 _BitOps::_S_bit_iteration(__mask.to_ullong(), [&](
auto __i) {
1939 __merge.set(__i, __mem[__i]);
1945 static inline void _S_store(
const _MaskMember __bitmask,
1946 bool* __mem) noexcept
1948 if constexpr (_Np == 1)
1949 __mem[0] = __bitmask[0];
1951 _FirstAbi<_UChar>::_CommonImpl::_S_store_bool_array(__bitmask, __mem);
1955 static inline
void _S_masked_store(const _MaskMember __v,
bool* __mem,
1956 const _MaskMember __k) noexcept
1958 _BitOps::_S_bit_iteration(__k, [&](
auto __i) { __mem[__i] = __v[__i]; });
1962 _GLIBCXX_SIMD_INTRINSIC
static _MaskMember
1963 _S_logical_and(
const _MaskMember& __x,
const _MaskMember& __y) noexcept
1964 {
return __x & __y; }
1966 _GLIBCXX_SIMD_INTRINSIC
static _MaskMember
1967 _S_logical_or(
const _MaskMember& __x,
const _MaskMember& __y) noexcept
1968 {
return __x | __y; }
1970 _GLIBCXX_SIMD_INTRINSIC
static constexpr _MaskMember
1971 _S_bit_not(
const _MaskMember& __x) noexcept
1974 _GLIBCXX_SIMD_INTRINSIC
static _MaskMember
1975 _S_bit_and(
const _MaskMember& __x,
const _MaskMember& __y) noexcept
1976 {
return __x & __y; }
1978 _GLIBCXX_SIMD_INTRINSIC
static _MaskMember
1979 _S_bit_or(
const _MaskMember& __x,
const _MaskMember& __y) noexcept
1980 {
return __x | __y; }
1982 _GLIBCXX_SIMD_INTRINSIC
static _MaskMember
1983 _S_bit_xor(
const _MaskMember& __x,
const _MaskMember& __y) noexcept
1984 {
return __x ^ __y; }
1987 _GLIBCXX_SIMD_INTRINSIC
static void _S_set(_MaskMember& __k,
int __i,
1989 { __k.set(__i, __x); }
1992 _GLIBCXX_SIMD_INTRINSIC
static void
1993 _S_masked_assign(
const _MaskMember __k, _MaskMember& __lhs,
1994 const _MaskMember __rhs)
1995 { __lhs = (__lhs & ~__k) | (__rhs & __k); }
1998 _GLIBCXX_SIMD_INTRINSIC
static void _S_masked_assign(
const _MaskMember __k,
2010 template <
typename _Tp>
2011 _GLIBCXX_SIMD_INTRINSIC
static bool _S_all_of(simd_mask<_Tp, _Abi> __k)
2012 {
return __data(__k).all(); }
2016 template <
typename _Tp>
2017 _GLIBCXX_SIMD_INTRINSIC
static bool _S_any_of(simd_mask<_Tp, _Abi> __k)
2018 {
return __data(__k).any(); }
2022 template <
typename _Tp>
2023 _GLIBCXX_SIMD_INTRINSIC
static bool _S_none_of(simd_mask<_Tp, _Abi> __k)
2024 {
return __data(__k).none(); }
2028 template <
typename _Tp>
2029 _GLIBCXX_SIMD_INTRINSIC
static bool
2030 _S_some_of([[maybe_unused]] simd_mask<_Tp, _Abi> __k)
2032 if constexpr (_Np == 1)
2035 return __data(__k).any() && !__data(__k).all();
2040 template <typename _Tp>
2041 _GLIBCXX_SIMD_INTRINSIC static
int _S_popcount(simd_mask<_Tp, _Abi> __k)
2042 {
return __data(__k).count(); }
2046 template <
typename _Tp>
2047 _GLIBCXX_SIMD_INTRINSIC
static int
2048 _S_find_first_set(simd_mask<_Tp, _Abi> __k)
2049 {
return std::__countr_zero(__data(__k).to_ullong()); }
2053 template <
typename _Tp>
2054 _GLIBCXX_SIMD_INTRINSIC
static int
2055 _S_find_last_set(simd_mask<_Tp, _Abi> __k)
2056 {
return std::__bit_width(__data(__k).to_ullong()) - 1; }
2062 _GLIBCXX_SIMD_END_NAMESPACE
complex< _Tp > log10(const complex< _Tp > &)
Return complex base 10 logarithm of z.
complex< _Tp > sin(const complex< _Tp > &)
Return complex sine of z.
complex< _Tp > log(const complex< _Tp > &)
Return complex natural logarithm of z.
complex< _Tp > tan(const complex< _Tp > &)
Return complex tangent of z.
complex< _Tp > exp(const complex< _Tp > &)
Return complex base e exponential of z.
complex< _Tp > cosh(const complex< _Tp > &)
Return complex hyperbolic cosine of z.
complex< _Tp > tanh(const complex< _Tp > &)
Return complex hyperbolic tangent of z.
complex< _Tp > pow(const complex< _Tp > &, int)
Return x to the y'th power.
complex< _Tp > sinh(const complex< _Tp > &)
Return complex hyperbolic sine of z.
complex< _Tp > cos(const complex< _Tp > &)
Return complex cosine of z.
complex< _Tp > sqrt(const complex< _Tp > &)
Return complex square root of z.
constexpr _Tp reduce(_InputIterator __first, _InputIterator __last, _Tp __init, _BinaryOperation __binary_op)
Calculate reduction of values in a range.
_Tp fabs(const std::complex< _Tp > &)
fabs(__z) [8.1.8].
std::complex< _Tp > asinh(const std::complex< _Tp > &)
asinh(__z) [8.1.6].
std::complex< _Tp > atan(const std::complex< _Tp > &)
atan(__z) [8.1.4].
constexpr auto size(const _Container &__cont) noexcept(noexcept(__cont.size())) -> decltype(__cont.size())
Return the size of a container.
std::complex< _Tp > atanh(const std::complex< _Tp > &)
atanh(__z) [8.1.7].
std::complex< _Tp > acosh(const std::complex< _Tp > &)
acosh(__z) [8.1.5].
integer_sequence< size_t, _Idx... > index_sequence
Alias template index_sequence.
std::complex< _Tp > acos(const std::complex< _Tp > &)
acos(__z) [8.1.2].
std::complex< _Tp > asin(const std::complex< _Tp > &)
asin(__z) [8.1.3].
constexpr _Iterator __base(_Iterator __it)