/*===-------------- avx10_2bf16intrin.h - AVX10-BF16 intrinsics ------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error \
    "Never use <avx10_2bf16intrin.h> directly; include <immintrin.h> instead."
#endif

#ifdef __SSE2__

#ifndef __AVX10_2BF16INTRIN_H
#define __AVX10_2BF16INTRIN_H

typedef __bf16 __m128bh_u __attribute__((__vector_size__(16), __aligned__(1)));
typedef __bf16 __m256bh_u __attribute__((__vector_size__(32), __aligned__(1)));

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS256 \
  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2"), \
                 __min_vector_width__(256)))
#define __DEFAULT_FN_ATTRS128 \
  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2"), \
                 __min_vector_width__(128)))

#if defined(__cplusplus) && (__cplusplus >= 201103L)
#define __DEFAULT_FN_ATTRS128_CONSTEXPR __DEFAULT_FN_ATTRS128 constexpr
#define __DEFAULT_FN_ATTRS256_CONSTEXPR __DEFAULT_FN_ATTRS256 constexpr
#else
#define __DEFAULT_FN_ATTRS128_CONSTEXPR __DEFAULT_FN_ATTRS128
#define __DEFAULT_FN_ATTRS256_CONSTEXPR __DEFAULT_FN_ATTRS256
#endif

static __inline __m256bh __DEFAULT_FN_ATTRS256 _mm256_setzero_pbh(void) {
  return __builtin_bit_cast(__m256bh, _mm256_setzero_ps());
}

static __inline __m128bh __DEFAULT_FN_ATTRS128 _mm_setzero_pbh(void) {
  return __builtin_bit_cast(__m128bh, _mm_setzero_ps());
}

static __inline__ __m128 __DEFAULT_FN_ATTRS128 _mm_castbf16_ps(__m128bh __a) {
  return (__m128)__a;
}

static __inline__ __m256 __DEFAULT_FN_ATTRS256
_mm256_castbf16_ps(__m256bh __a) {
  return (__m256)__a;
}

static __inline__ __m256d __DEFAULT_FN_ATTRS256
_mm256_castbf16_pd(__m256bh __a) {
  return (__m256d)__a;
}

static __inline__ __m128d __DEFAULT_FN_ATTRS128 _mm_castbf16_pd(__m128bh __a) {
  return (__m128d)__a;
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_castbf16_si128(__m128bh __a) {
  return (__m128i)__a;
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_castbf16_si256(__m256bh __a) {
  return (__m256i)__a;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_castps_pbh(__m128 __a) {
  return (__m128bh)__a;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_castps_pbh(__m256 __a) {
  return (__m256bh)__a;
}

static __inline__ __bf16 __DEFAULT_FN_ATTRS128 _mm_cvtsbh_bf16(__m128bh __a) {
  return __a[0];
}

static __inline__ __bf16 __DEFAULT_FN_ATTRS256
_mm256_cvtsbh_bf16(__m256bh __a) {
  return __a[0];
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_castpd_pbh(__m128d __a) {
  return (__m128bh)__a;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_castpd_pbh(__m256d __a) {
  return (__m256bh)__a;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_castsi128_pbh(__m128i __a) {
  return (__m128bh)__a;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_castsi256_pbh(__m256i __a) {
  return (__m256bh)__a;
}

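/* Illustrative usage (editorial example, not part of the upstream header):
 * the cast intrinsics above are bit-pattern reinterpretations and generate no
 * instructions. A minimal sketch, assuming a translation unit that includes
 * <immintrin.h> with AVX10.2 enabled; the function name is hypothetical:
 *
 *   __m128bh bits_to_pbh(__m128i raw) {
 *     // Reinterpret eight 16-bit lanes as eight bf16 lanes; no conversion.
 *     return _mm_castsi128_pbh(raw);
 *   }
 */
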
static __inline__ __m128bh __DEFAULT_FN_ATTRS256
_mm256_castbf16256_pbh128(__m256bh __a) {
  return __builtin_shufflevector(__a, __a, 0, 1, 2, 3, 4, 5, 6, 7);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_castbf16128_pbh256(__m128bh __a) {
  return __builtin_shufflevector(__a, __a, 0, 1, 2, 3, 4, 5, 6, 7, -1, -1, -1,
                                 -1, -1, -1, -1, -1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_zextbf16128_pbh256(__m128bh __a) {
  return __builtin_shufflevector(__a, (__v8bf)_mm_setzero_pbh(), 0, 1, 2, 3, 4,
                                 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
}

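/* Editorial note: _mm256_castbf16128_pbh256 leaves bits 255:128 undefined
 * (the -1 shuffle indices), while _mm256_zextbf16128_pbh256 zeroes them. A
 * sketch of when the zeroed form matters (illustrative; the helper name is
 * hypothetical):
 *
 *   __m256bh widen_for_sum(__m128bh lo) {
 *     // Zero the upper lanes so they are neutral in a later addition.
 *     return _mm256_zextbf16128_pbh256(lo);
 *   }
 */
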
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_undefined_pbh(void) {
  return (__m256bh)__builtin_ia32_undef256();
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_load_sbh(void const *__dp) {
  __m128bh src = (__v8bf)_mm_setzero_pbh();
  return (__m128bh)__builtin_ia32_loadsbf16128_mask((const __v8bf *)__dp, src,
                                                    1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_load_sbh(__m128bh __W, __mmask8 __U, const void *__A) {
  __m128bh src = (__v8bf)__builtin_shufflevector(
      (__v8bf)__W, (__v8bf)_mm_setzero_pbh(), 0, 8, 8, 8, 8, 8, 8, 8);

  return (__m128bh)__builtin_ia32_loadsbf16128_mask((const __v8bf *)__A, src,
                                                    __U & 1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_load_sbh(__mmask8 __U, const void *__A) {
  return (__m128bh)__builtin_ia32_loadsbf16128_mask(
      (const __v8bf *)__A, (__v8bf)_mm_setzero_pbh(), __U & 1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_load_pbh(void const *__p) {
  return *(const __m256bh *)__p;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_load_pbh(void const *__p) {
  return *(const __m128bh *)__p;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_loadu_pbh(void const *__p) {
  struct __loadu_pbh {
    __m256bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  return ((const struct __loadu_pbh *)__p)->__v;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_loadu_pbh(void const *__p) {
  struct __loadu_pbh {
    __m128bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  return ((const struct __loadu_pbh *)__p)->__v;
}

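/* Illustrative usage (editorial example): _mm_load_pbh and _mm256_load_pbh
 * dereference the pointer at full vector alignment (16/32 bytes), while the
 * *_loadu_* forms accept any alignment via the packed wrapper struct. Sketch;
 * the function and parameter names are hypothetical:
 *
 *   __m256bh load_row(const __bf16 *row) {
 *     // Prefer the unaligned form unless alignment is guaranteed.
 *     return _mm256_loadu_pbh(row);
 *   }
 */
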
static __inline__ void __DEFAULT_FN_ATTRS128 _mm_store_sbh(void *__dp,
                                                           __m128bh __a) {
  struct __mm_store_sbh_struct {
    __bf16 __u;
  } __attribute__((__packed__, __may_alias__));
  ((struct __mm_store_sbh_struct *)__dp)->__u = __a[0];
}

static __inline__ void __DEFAULT_FN_ATTRS128 _mm_mask_store_sbh(void *__W,
                                                                __mmask8 __U,
                                                                __m128bh __A) {
  __builtin_ia32_storesbf16128_mask((__v8bf *)__W, __A, __U & 1);
}

static __inline__ void __DEFAULT_FN_ATTRS256 _mm256_store_pbh(void *__P,
                                                              __m256bh __A) {
  *(__m256bh *)__P = __A;
}

static __inline__ void __DEFAULT_FN_ATTRS128 _mm_store_pbh(void *__P,
                                                           __m128bh __A) {
  *(__m128bh *)__P = __A;
}

static __inline__ void __DEFAULT_FN_ATTRS256 _mm256_storeu_pbh(void *__P,
                                                               __m256bh __A) {
  struct __storeu_pbh {
    __m256bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __storeu_pbh *)__P)->__v = __A;
}

static __inline__ void __DEFAULT_FN_ATTRS128 _mm_storeu_pbh(void *__P,
                                                            __m128bh __A) {
  struct __storeu_pbh {
    __m128bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __storeu_pbh *)__P)->__v = __A;
}

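/* Illustrative round trip (editorial example): copy sixteen bf16 values
 * through a register with the unaligned load/store pair above. Sketch; the
 * helper name is hypothetical:
 *
 *   void copy16(__bf16 *dst, const __bf16 *src) {
 *     _mm256_storeu_pbh(dst, _mm256_loadu_pbh(src));
 *   }
 */
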
static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_move_sbh(__m128bh __a,
                                                              __m128bh __b) {
  __a[0] = __b[0];
  return __a;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_move_sbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return __builtin_ia32_selectsbf_128(__U, _mm_move_sbh(__A, __B), __W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_move_sbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return __builtin_ia32_selectsbf_128(__U, _mm_move_sbh(__A, __B),
                                      _mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_undefined_pbh(void) {
  return (__m128bh)__builtin_ia32_undef128();
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_set_sbh(__bf16 bf) {
  return (__v8bf)__builtin_shufflevector(
      (__v8bf){bf, bf, bf, bf, bf, bf, bf, bf}, (__v8bf)_mm_setzero_pbh(), 0, 8,
      8, 8, 8, 8, 8, 8);
}

static __inline __m128bh __DEFAULT_FN_ATTRS128 _mm_set1_pbh(__bf16 bf) {
  return (__m128bh)(__v8bf){bf, bf, bf, bf, bf, bf, bf, bf};
}

static __inline __m256bh __DEFAULT_FN_ATTRS256 _mm256_set1_pbh(__bf16 bf) {
  return (__m256bh)(__v16bf){bf, bf, bf, bf, bf, bf, bf, bf,
                             bf, bf, bf, bf, bf, bf, bf, bf};
}

static __inline __m128bh __DEFAULT_FN_ATTRS128
_mm_set_pbh(__bf16 bf1, __bf16 bf2, __bf16 bf3, __bf16 bf4, __bf16 bf5,
            __bf16 bf6, __bf16 bf7, __bf16 bf8) {
  return (__m128bh)(__v8bf){bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8};
}

static __inline __m256bh __DEFAULT_FN_ATTRS256 _mm256_set_pbh(
    __bf16 bf1, __bf16 bf2, __bf16 bf3, __bf16 bf4, __bf16 bf5, __bf16 bf6,
    __bf16 bf7, __bf16 bf8, __bf16 bf9, __bf16 bf10, __bf16 bf11, __bf16 bf12,
    __bf16 bf13, __bf16 bf14, __bf16 bf15, __bf16 bf16) {
  return (__m256bh)(__v16bf){bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8,
                             bf9, bf10, bf11, bf12, bf13, bf14, bf15, bf16};
}

#define _mm_setr_pbh(bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8) \
  _mm_set_pbh((bf8), (bf7), (bf6), (bf5), (bf4), (bf3), (bf2), (bf1))

#define _mm256_setr_pbh(bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8, bf9, bf10, \
                        bf11, bf12, bf13, bf14, bf15, bf16) \
  _mm256_set_pbh((bf16), (bf15), (bf14), (bf13), (bf12), (bf11), (bf10), \
                 (bf9), (bf8), (bf7), (bf6), (bf5), (bf4), (bf3), (bf2), \
                 (bf1))

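/* Editorial note on ordering: _mm_set_pbh lists elements from the highest
 * lane down to lane 0, while _mm_setr_pbh lists them in memory order (lane 0
 * first). Sketch, assuming a compiler where __bf16 converts from float as in
 * recent Clang; the helper name is hypothetical:
 *
 *   __m128bh ramp(void) {
 *     // Lane i holds the value i: setr lists lane 0 first.
 *     return _mm_setr_pbh((__bf16)0.0f, (__bf16)1.0f, (__bf16)2.0f,
 *                         (__bf16)3.0f, (__bf16)4.0f, (__bf16)5.0f,
 *                         (__bf16)6.0f, (__bf16)7.0f);
 *   }
 */
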
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_abs_pbh(__m256bh __A) {
  return (__m256bh)_mm256_and_epi32(_mm256_set1_epi32(0x7FFF7FFF),
                                    (__m256i)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_abs_pbh(__m128bh __A) {
  return (__m128bh)_mm_and_epi32(_mm_set1_epi32(0x7FFF7FFF), (__m128i)__A);
}

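/* Editorial note: the 0x7FFF7FFF mask clears bit 15 of each 16-bit half,
 * i.e. the sign bit of two bf16 lanes per 32-bit element, so the bitwise AND
 * never crosses a lane boundary and leaves all other bits (including NaN
 * payloads) intact. Sketch (illustrative; the helper name is hypothetical):
 *
 *   __m128bh magnitude(__m128bh x) {
 *     return _mm_abs_pbh(x); // lane-wise |x|
 *   }
 */
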
static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_blend_pbh(__mmask8 __U, __m128bh __A, __m128bh __W) {
  return (__m128bh)__builtin_ia32_selectpbf_128((__mmask8)__U, (__v8bf)__W,
                                                (__v8bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_blend_pbh(__mmask16 __U, __m256bh __A, __m256bh __W) {
  return (__m256bh)__builtin_ia32_selectpbf_256((__mmask16)__U, (__v16bf)__W,
                                                (__v16bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128_CONSTEXPR
_mm_permutex2var_pbh(__m128bh __A, __m128i __I, __m128bh __B) {
  return (__m128bh)__builtin_ia32_vpermi2varhi128((__v8hi)__A, (__v8hi)__I,
                                                  (__v8hi)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256_CONSTEXPR
_mm256_permutex2var_pbh(__m256bh __A, __m256i __I, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vpermi2varhi256((__v16hi)__A, (__v16hi)__I,
                                                  (__v16hi)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_permutexvar_pbh(__m128i __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_permvarhi128((__v8hi)__B, (__v8hi)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_permutexvar_pbh(__m256i __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_permvarhi256((__v16hi)__B, (__v16hi)__A);
}

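/* Illustrative usage (editorial example): _mm_permutexvar_pbh gathers lane
 * __A[i] of __B into lane i of the result. Reversing the eight lanes; the
 * helper name is hypothetical:
 *
 *   __m128bh reverse_lanes(__m128bh v) {
 *     const __m128i idx = _mm_setr_epi16(7, 6, 5, 4, 3, 2, 1, 0);
 *     return _mm_permutexvar_pbh(idx, v);
 *   }
 */
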
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_add_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)((__v16bf)__A + (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_add_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_add_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_add_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_add_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_add_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)((__v8bf)__A + (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_add_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_add_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_add_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_add_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

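/* Editorial note on the masking convention used throughout this file: a
 * mask_ form writes the computed lane where the mask bit is 1 and the
 * corresponding lane of __W where it is 0; a maskz_ form zeroes unselected
 * lanes instead. Sketch (illustrative; the helper name is hypothetical):
 *
 *   __m128bh add_low_half(__m128bh a, __m128bh b) {
 *     // Lanes 0-3 receive a+b; lanes 4-7 are zeroed.
 *     return _mm_maskz_add_pbh((__mmask8)0x0F, a, b);
 *   }
 */
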
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_sub_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)((__v16bf)__A - (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_sub_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_sub_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_sub_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_sub_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_sub_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)((__v8bf)__A - (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_sub_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_sub_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_sub_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_sub_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mul_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)((__v16bf)__A * (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_mul_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_mul_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_mul_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_mul_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_mul_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)((__v8bf)__A * (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_mul_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_mul_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_mul_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_mul_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_div_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)((__v16bf)__A / (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_div_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_div_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_div_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_div_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_div_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)((__v8bf)__A / (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_div_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_div_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_div_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_div_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_max_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)__builtin_ia32_vmaxbf16256((__v16bf)__A, (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_max_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_max_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_max_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_max_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_max_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)__builtin_ia32_vmaxbf16128((__v8bf)__A, (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_max_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_max_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_max_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_max_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_min_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)__builtin_ia32_vminbf16256((__v16bf)__A, (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_min_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_min_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_min_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_min_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_min_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)__builtin_ia32_vminbf16128((__v8bf)__A, (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_min_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_min_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_min_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_min_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

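/* Illustrative usage (editorial example): composing min and max gives a
 * lane-wise clamp. NaN and signed-zero handling follows the underlying
 * vminbf16/vmaxbf16 instruction semantics, not the C fmin/fmax rules. The
 * helper name is hypothetical:
 *
 *   __m256bh clamp01(__m256bh x) {
 *     const __m256bh lo = _mm256_set1_pbh((__bf16)0.0f);
 *     const __m256bh hi = _mm256_set1_pbh((__bf16)1.0f);
 *     return _mm256_min_pbh(_mm256_max_pbh(x, lo), hi);
 *   }
 */
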
static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comieq_sbh(__m128bh __A,
                                                           __m128bh __B) {
  return __builtin_ia32_vcomisbf16eq((__v8bf)__A, (__v8bf)__B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comilt_sbh(__m128bh __A,
                                                           __m128bh __B) {
  return __builtin_ia32_vcomisbf16lt((__v8bf)__A, (__v8bf)__B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comile_sbh(__m128bh __A,
                                                           __m128bh __B) {
  return __builtin_ia32_vcomisbf16le((__v8bf)__A, (__v8bf)__B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comigt_sbh(__m128bh __A,
                                                           __m128bh __B) {
  return __builtin_ia32_vcomisbf16gt((__v8bf)__A, (__v8bf)__B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comige_sbh(__m128bh __A,
                                                           __m128bh __B) {
  return __builtin_ia32_vcomisbf16ge((__v8bf)__A, (__v8bf)__B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comineq_sbh(__m128bh __A,
                                                            __m128bh __B) {
  return __builtin_ia32_vcomisbf16neq((__v8bf)__A, (__v8bf)__B);
}

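/* Illustrative usage (editorial example): the _mm_comi*_sbh helpers compare
 * lane 0 of each operand and return 0 or 1, so they can drive ordinary
 * control flow. The function name is hypothetical:
 *
 *   int first_is_smaller(__m128bh a, __m128bh b) {
 *     return _mm_comilt_sbh(a, b);
 *   }
 */
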
#define _mm256_cmp_pbh_mask(__A, __B, __P) \
  ((__mmask16)__builtin_ia32_vcmpbf16256_mask((__v16bf)(__m256bh)(__A), \
                                              (__v16bf)(__m256bh)(__B), \
                                              (int)(__P), (__mmask16)-1))

#define _mm256_mask_cmp_pbh_mask(__U, __A, __B, __P) \
  ((__mmask16)__builtin_ia32_vcmpbf16256_mask((__v16bf)(__m256bh)(__A), \
                                              (__v16bf)(__m256bh)(__B), \
                                              (int)(__P), (__mmask16)(__U)))

#define _mm_cmp_pbh_mask(__A, __B, __P) \
  ((__mmask8)__builtin_ia32_vcmpbf16128_mask((__v8bf)(__m128bh)(__A), \
                                             (__v8bf)(__m128bh)(__B), \
                                             (int)(__P), (__mmask8)-1))

#define _mm_mask_cmp_pbh_mask(__U, __A, __B, __P) \
  ((__mmask8)__builtin_ia32_vcmpbf16128_mask((__v8bf)(__m128bh)(__A), \
                                             (__v8bf)(__m128bh)(__B), \
                                             (int)(__P), (__mmask8)(__U)))

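/* Illustrative usage (editorial example): __P takes the standard _CMP_*
 * predicates from <immintrin.h>. Building a mask of lanes where a < b; the
 * helper name is hypothetical:
 *
 *   __mmask16 less_mask(__m256bh a, __m256bh b) {
 *     return _mm256_cmp_pbh_mask(a, b, _CMP_LT_OQ);
 *   }
 */
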
#define _mm256_mask_fpclass_pbh_mask(__U, __A, imm) \
  ((__mmask16)__builtin_ia32_vfpclassbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__mmask16)(__U)))

#define _mm256_fpclass_pbh_mask(__A, imm) \
  ((__mmask16)__builtin_ia32_vfpclassbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__mmask16)-1))

#define _mm_mask_fpclass_pbh_mask(__U, __A, imm) \
  ((__mmask8)__builtin_ia32_vfpclassbf16128_mask((__v8bf)(__m128bh)(__A), \
                                                 (int)(imm), (__mmask8)(__U)))

#define _mm_fpclass_pbh_mask(__A, imm) \
  ((__mmask8)__builtin_ia32_vfpclassbf16128_mask((__v8bf)(__m128bh)(__A), \
                                                 (int)(imm), (__mmask8)-1))

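/* Illustrative usage (editorial example): imm selects the categories to
 * test, and to the best of our understanding uses the same bit encoding as
 * the other AVX-512/AVX10 vfpclass instructions. Sketch under that
 * assumption; the helper name and the constant (QNaN | SNaN bits) are
 * illustrative only:
 *
 *   __mmask16 nan_lanes(__m256bh x) {
 *     return _mm256_fpclass_pbh_mask(x, 0x01 | 0x80);
 *   }
 */
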
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_scalef_pbh(__m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vscalefbf16256_mask(
      (__v16bf)__A, (__v16bf)__B, (__v16bf)_mm256_undefined_pbh(),
      (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_scalef_pbh(
    __m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vscalefbf16256_mask(
      (__v16bf)__A, (__v16bf)__B, (__v16bf)__W, (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_scalef_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vscalefbf16256_mask(
      (__v16bf)__A, (__v16bf)__B, (__v16bf)_mm256_setzero_pbh(),
      (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_scalef_pbh(__m128bh __A,
                                                                __m128bh __B) {
  return (__m128bh)__builtin_ia32_vscalefbf16128_mask(
      (__v8bf)__A, (__v8bf)__B, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_scalef_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_vscalefbf16128_mask(
      (__v8bf)__A, (__v8bf)__B, (__v8bf)__W, (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_scalef_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_vscalefbf16128_mask(
      (__v8bf)__A, (__v8bf)__B, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

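/* Illustrative usage (editorial example): following the usual vscalef
 * semantics, scalef computes __A * 2^floor(__B) per lane. Doubling every
 * lane, under that assumption; the helper name is hypothetical:
 *
 *   __m128bh times_two(__m128bh x) {
 *     return _mm_scalef_pbh(x, _mm_set1_pbh((__bf16)1.0f));
 *   }
 */
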
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_rcp_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vrcpbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_rcp_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrcpbf16256_mask((__v16bf)__A, (__v16bf)__W,
                                                   (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_rcp_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrcpbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_setzero_pbh(), (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_rcp_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vrcpbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_rcp_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrcpbf16128_mask((__v8bf)__A, (__v8bf)__W,
                                                   (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_rcp_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrcpbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_getexp_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vgetexpbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_getexp_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vgetexpbf16256_mask(
      (__v16bf)__A, (__v16bf)__W, (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_getexp_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vgetexpbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_setzero_pbh(), (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_getexp_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vgetexpbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_getexp_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vgetexpbf16128_mask((__v8bf)__A, (__v8bf)__W,
                                                      (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_getexp_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vgetexpbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_rsqrt_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vrsqrtbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_rsqrt_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrsqrtbf16256_mask((__v16bf)__A, (__v16bf)__W,
                                                     (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_rsqrt_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrsqrtbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_setzero_pbh(), (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_rsqrt_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vrsqrtbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_rsqrt_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrsqrtbf16128_mask((__v8bf)__A, (__v8bf)__W,
                                                     (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_rsqrt_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrsqrtbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

#define _mm256_reduce_pbh(__A, imm) \
  ((__m256bh)__builtin_ia32_vreducebf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_undefined_pbh(), \
      (__mmask16)-1))

#define _mm256_mask_reduce_pbh(__W, __U, __A, imm) \
  ((__m256bh)__builtin_ia32_vreducebf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)(__m256bh)(__W), \
      (__mmask16)(__U)))

#define _mm256_maskz_reduce_pbh(__U, __A, imm) \
  ((__m256bh)__builtin_ia32_vreducebf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(), \
      (__mmask16)(__U)))

#define _mm_reduce_pbh(__A, imm) \
  ((__m128bh)__builtin_ia32_vreducebf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_undefined_pbh(), \
      (__mmask8)-1))

#define _mm_mask_reduce_pbh(__W, __U, __A, imm) \
  ((__m128bh)__builtin_ia32_vreducebf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)(__m128bh)(__W), \
      (__mmask8)(__U)))

#define _mm_maskz_reduce_pbh(__U, __A, imm) \
  ((__m128bh)__builtin_ia32_vreducebf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(), \
      (__mmask8)(__U)))

#define _mm256_roundscale_pbh(__A, imm) \
  ((__m256bh)__builtin_ia32_vrndscalebf16_256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(), \
      (__mmask16)-1))

#define _mm256_mask_roundscale_pbh(__W, __U, __A, imm) \
  ((__m256bh)__builtin_ia32_vrndscalebf16_256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)(__m256bh)(__W), \
      (__mmask16)(__U)))

#define _mm256_maskz_roundscale_pbh(__U, __A, imm) \
  ((__m256bh)__builtin_ia32_vrndscalebf16_256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(), \
      (__mmask16)(__U)))

#define _mm_roundscale_pbh(__A, imm) \
  ((__m128bh)__builtin_ia32_vrndscalebf16_128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(), \
      (__mmask8)-1))

#define _mm_mask_roundscale_pbh(__W, __U, __A, imm) \
  ((__m128bh)__builtin_ia32_vrndscalebf16_128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)(__m128bh)(__W), \
      (__mmask8)(__U)))

#define _mm_maskz_roundscale_pbh(__U, __A, imm) \
  ((__m128bh)__builtin_ia32_vrndscalebf16_128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(), \
      (__mmask8)(__U)))

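/* Illustrative usage (editorial example): following the usual vrndscale
 * encoding, the low bits of imm select the rounding mode and the upper bits
 * a scale M (round to multiples of 2^-M). Rounding to the nearest integer,
 * under that assumption; the constant and helper name are illustrative:
 *
 *   __m128bh round_nearest(__m128bh x) {
 *     return _mm_roundscale_pbh(x, 0x00); // M = 0, round-to-nearest-even
 *   }
 */
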
#define _mm256_getmant_pbh(__A, __B, __C) \
  ((__m256bh)__builtin_ia32_vgetmantbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1))

#define _mm256_mask_getmant_pbh(__W, __U, __A, __B, __C) \
  ((__m256bh)__builtin_ia32_vgetmantbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v16bf)(__m256bh)(__W), (__mmask16)(__U)))

#define _mm256_maskz_getmant_pbh(__U, __A, __B, __C) \
  ((__m256bh)__builtin_ia32_vgetmantbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v16bf)_mm256_setzero_pbh(), (__mmask16)(__U)))

#define _mm_getmant_pbh(__A, __B, __C) \
  ((__m128bh)__builtin_ia32_vgetmantbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v8bf)_mm_undefined_pbh(), (__mmask8)-1))

#define _mm_mask_getmant_pbh(__W, __U, __A, __B, __C) \
  ((__m128bh)__builtin_ia32_vgetmantbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v8bf)(__m128bh)(__W), (__mmask8)(__U)))

#define _mm_maskz_getmant_pbh(__U, __A, __B, __C) \
  ((__m128bh)__builtin_ia32_vgetmantbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v8bf)_mm_setzero_pbh(), (__mmask8)(__U)))

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_sqrt_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vsqrtbf16256((__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_sqrt_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_sqrt_pbh(__A), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_sqrt_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_selectpbf_256((__mmask16)__U,
                                                (__v16bf)_mm256_sqrt_pbh(__A),
                                                (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_sqrt_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vsqrtbf16((__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_sqrt_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_sqrt_pbh(__A), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_sqrt_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_sqrt_pbh(__A), (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fmadd_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_elementwise_fma((__v16bf)__A, (__v16bf)__B,
                                             (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_fmadd_pbh(__m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmadd_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C), (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fmadd_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmadd_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C), (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fmadd_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmadd_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fmsub_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_elementwise_fma((__v16bf)__A, (__v16bf)__B,
                                             -(__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_fmsub_pbh(__m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmsub_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C), (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fmsub_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmsub_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C), (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fmsub_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmsub_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fnmadd_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_elementwise_fma((__v16bf)__A, -(__v16bf)__B,
                                             (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_fnmadd_pbh(
    __m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmadd_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fnmadd_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmadd_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fnmadd_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmadd_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fnmsub_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_elementwise_fma((__v16bf)__A, -(__v16bf)__B,
                                             -(__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_fnmsub_pbh(
    __m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmsub_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fnmsub_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmsub_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fnmsub_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmsub_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_fmadd_pbh(__m128bh __A,
                                                               __m128bh __B,
                                                               __m128bh __C) {
  return (__m128bh)__builtin_elementwise_fma((__v8bf)__A, (__v8bf)__B,
                                             (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fmadd_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmadd_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fmadd_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmadd_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fmadd_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmadd_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_fmsub_pbh(__m128bh __A,
                                                               __m128bh __B,
                                                               __m128bh __C) {
  return (__m128bh)__builtin_elementwise_fma((__v8bf)__A, (__v8bf)__B,
                                             -(__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fmsub_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmsub_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fmsub_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmsub_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fmsub_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmsub_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_fnmadd_pbh(__m128bh __A,
                                                                __m128bh __B,
                                                                __m128bh __C) {
  return (__m128bh)__builtin_elementwise_fma((__v8bf)__A, -(__v8bf)__B,
                                             (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fnmadd_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmadd_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fnmadd_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmadd_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fnmadd_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmadd_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_fnmsub_pbh(__m128bh __A,
                                                                __m128bh __B,
                                                                __m128bh __C) {
  return (__m128bh)__builtin_elementwise_fma((__v8bf)__A, -(__v8bf)__B,
                                             -(__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fnmsub_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmsub_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fnmsub_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmsub_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fnmsub_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmsub_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

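/* Editorial note on the fused multiply ops above: the mask_ forms blend
 * unselected lanes from the first operand (__A), the mask3_ forms from the
 * third (__C), and the maskz_ forms zero them. Computing a masked
 * multiply-accumulate (sketch; the names are hypothetical):
 *
 *   __m128bh axpy_masked(__mmask8 m, __m128bh a, __m128bh b, __m128bh c) {
 *     // Unselected lanes keep the accumulator value held in c.
 *     return _mm_mask3_fmadd_pbh(a, b, c, m);
 *   }
 */
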
#undef __DEFAULT_FN_ATTRS128
#undef __DEFAULT_FN_ATTRS256
#undef __DEFAULT_FN_ATTRS128_CONSTEXPR
#undef __DEFAULT_FN_ATTRS256_CONSTEXPR
#endif /* __AVX10_2BF16INTRIN_H */
#endif /* __SSE2__ */