7#ifndef KIS_XSIMD_GENERIC_DETAILS_DECL_HPP
8#define KIS_XSIMD_GENERIC_DETAILS_DECL_HPP
19template<
typename V,
typename T,
typename A>
27template<
typename T,
typename A>
28inline auto set_zero(
const batch<T, A> &src,
const batch_bool<T, A> &mask)
noexcept;
31template<
typename T,
typename A>
32inline auto set_one(
const batch<T, A> &src,
const batch_bool<T, A> &mask)
noexcept;
// Loads a single element of type T2 from `src` and converts/extends it to T.
// NOTE(review): the declaration was missing here; it has been reconstructed
// from the signature index at the bottom of this extract
// (`T load_and_extend(const T2 *src) noexcept`) — verify against the original
// header. (Fused extraction line number "39" removed.)
template<typename T, typename T2>
inline T load_and_extend(const T2 *src) noexcept;
47template<
typename T,
typename A = xsimd::current_arch>
52template<
typename T,
typename A = xsimd::current_arch>
// Interleaves the lanes of `a` and `b`, returning the resulting pair of
// vectors. NOTE(review): the `template<typename V>` introducer was missing in
// this extract and has been restored to match the signature index below
// (`std::pair<V, V> interleave(const V &a, const V &b) noexcept`); the fused
// line number "66" was removed. Verify against the original header.
template<typename V>
inline std::pair<V, V> interleave(const V &a, const V &b) noexcept;
68template<
typename T,
typename A>
69inline xsimd::batch<T, A>
pow2(xsimd::batch<T, A>
const &self)
noexcept;
79template<
class F,
class A,
class T>
82 alignas(A::alignment()) std::array<T, batch<T, A>::size> self_buffer;
83 self.store_aligned(self_buffer.data());
84 for (std::size_t i = 0; i < batch<T, A>::size; ++i) {
85 self_buffer[i] = func(self_buffer[i]);
87 return batch<T, A>::load_aligned(self_buffer.data());
90template<
class F,
class A,
class T>
93 alignas(A::alignment()) std::array<T, batch<T, A>::size> self_buffer;
94 self.store_aligned(self_buffer.data());
95 for (std::size_t i = 0; i < batch<T, A>::size; ++i) {
96 self_buffer[i] = func(i, self_buffer[i]);
98 return batch<T, A>::load_aligned(self_buffer.data());
batch< T, A > apply_with_index_and_value(F &&func, batch< T, A > const &self) noexcept
batch< T, A > apply_with_value(F &&func, batch< T, A > const &self) noexcept
std::pair< V, V > interleave(const V &a, const V &b) noexcept
T * vector_aligned_malloc(size_t sz) noexcept
auto set_one(const batch< T, A > &src, const batch_bool< T, A > &mask) noexcept
batch< T, A > pow2(batch< T, A > const &self) noexcept
auto set_zero(const batch< T, A > &src, const batch_bool< T, A > &mask) noexcept
void vector_aligned_free(const T *ptr) noexcept
T * aligned_malloc(size_t size) noexcept
batch< T, A > truncate_to_type(xsimd::batch< T, A > const &self) noexcept
T load_and_extend(const T2 *src) noexcept