clang 20.0.0git
avx10_2bf16intrin.h
/*===-------------- avx10_2bf16intrin.h - AVX10-BF16 intrinsics ------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error \
    "Never use <avx10_2bf16intrin.h> directly; include <immintrin.h> instead."
#endif

#ifdef __SSE2__

#ifndef __AVX10_2BF16INTRIN_H
#define __AVX10_2BF16INTRIN_H

typedef __bf16 __m128bh_u __attribute__((__vector_size__(16), __aligned__(1)));
typedef __bf16 __m256bh_u __attribute__((__vector_size__(32), __aligned__(1)));

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS256 \
  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"), \
                 __min_vector_width__(256)))
#define __DEFAULT_FN_ATTRS128 \
  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"), \
                 __min_vector_width__(128)))

static __inline __m256bh __DEFAULT_FN_ATTRS256 _mm256_setzero_pbh(void) {
  return __builtin_bit_cast(__m256bh, _mm256_setzero_ps());
}

static __inline __m128bh __DEFAULT_FN_ATTRS128 _mm_setzero_pbh(void) {
  return __builtin_bit_cast(__m128bh, _mm_setzero_ps());
}

static __inline__ __m128 __DEFAULT_FN_ATTRS128 _mm_castpbf16_ps(__m128bh __a) {
  return (__m128)__a;
}

static __inline__ __m256 __DEFAULT_FN_ATTRS256
_mm256_castpbf16_ps(__m256bh __a) {
  return (__m256)__a;
}

static __inline__ __m256d __DEFAULT_FN_ATTRS256
_mm256_castpbf16_pd(__m256bh __a) {
  return (__m256d)__a;
}

static __inline__ __m128d __DEFAULT_FN_ATTRS128 _mm_castpbf16_pd(__m128bh __a) {
  return (__m128d)__a;
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_castpbf16_si128(__m128bh __a) {
  return (__m128i)__a;
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_castpbf16_si256(__m256bh __a) {
  return (__m256i)__a;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_castps_pbh(__m128 __a) {
  return (__m128bh)__a;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_castps_pbh(__m256 __a) {
  return (__m256bh)__a;
}

static __inline__ __bf16 __DEFAULT_FN_ATTRS128 _mm_cvtsbh_bf16(__m128bh __a) {
  return __a[0];
}

static __inline__ __bf16 __DEFAULT_FN_ATTRS256
_mm256_cvtsbh_bf16(__m256bh __a) {
  return __a[0];
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_castpd_pbh(__m128d __a) {
  return (__m128bh)__a;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_castpd_pbh(__m256d __a) {
  return (__m256bh)__a;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_castsi128_pbh(__m128i __a) {
  return (__m128bh)__a;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_castsi256_pbh(__m256i __a) {
  return (__m256bh)__a;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS256
_mm256_castpbf16256_pbh128(__m256bh __a) {
  return __builtin_shufflevector(__a, __a, 0, 1, 2, 3, 4, 5, 6, 7);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_castpbf16128_pbh256(__m128bh __a) {
  return __builtin_shufflevector(__a, __a, 0, 1, 2, 3, 4, 5, 6, 7, -1, -1, -1,
                                 -1, -1, -1, -1, -1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_zextpbf16128_pbh256(__m128bh __a) {
  return __builtin_shufflevector(__a, (__v8bf)_mm_setzero_pbh(), 0, 1, 2, 3, 4,
                                 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_undefined_pbh(void) {
  return (__m256bh)__builtin_ia32_undef256();
}

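/* Usage sketch (illustrative only; __example_negate_pbh is a hypothetical
 * helper, not part of this header): because the casts above reinterpret bits
 * without converting values, integer logic can edit bf16 lanes directly,
 * e.g. flipping each sign bit to negate all elements. */
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
__example_negate_pbh(__m256bh __A) {
  __m256i __bits = _mm256_castpbf16_si256(__A);
  /* 0x8000 is the sign bit of each 16-bit bf16 lane. */
  __bits = _mm256_xor_si256(__bits, _mm256_set1_epi16((short)0x8000));
  return _mm256_castsi256_pbh(__bits);
}
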
static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_load_sbh(void const *__dp) {
  __m128bh src = (__v8bf)_mm_setzero_pbh();
  return (__m128bh)__builtin_ia32_loadsbf16128_mask((const __v8bf *)__dp, src,
                                                    1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_load_sbh(__m128bh __W, __mmask8 __U, const void *__A) {
  __m128bh src = (__v8bf)__builtin_shufflevector(
      (__v8bf)__W, (__v8bf)_mm_setzero_pbh(), 0, 8, 8, 8, 8, 8, 8, 8);

  return (__m128bh)__builtin_ia32_loadsbf16128_mask((const __v8bf *)__A, src,
                                                    __U & 1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_load_sbh(__mmask8 __U, const void *__A) {
  return (__m128bh)__builtin_ia32_loadsbf16128_mask(
      (const __v8bf *)__A, (__v8bf)_mm_setzero_pbh(), __U & 1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_load_pbh(void const *__p) {
  return *(const __m256bh *)__p;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_load_pbh(void const *__p) {
  return *(const __m128bh *)__p;
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_loadu_pbh(void const *__p) {
  struct __loadu_pbh {
    __m256bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  return ((const struct __loadu_pbh *)__p)->__v;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_loadu_pbh(void const *__p) {
  struct __loadu_pbh {
    __m128bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  return ((const struct __loadu_pbh *)__p)->__v;
}

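/* Usage sketch (illustrative; hypothetical helper): a guarded scalar load.
 * Masked-off elements of a masked load are not read, so when __have_data is
 * zero no memory access happens and lane 0 comes back as 0. */
static __inline__ __m128bh __DEFAULT_FN_ATTRS128
__example_load_first_or_zero(const __bf16 *__p, int __have_data) {
  return _mm_maskz_load_sbh((__mmask8)(__have_data ? 1 : 0), __p);
}
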
static __inline__ void __DEFAULT_FN_ATTRS128 _mm_store_sbh(void *__dp,
                                                           __m128bh __a) {
  struct __mm_store_sbh_struct {
    __bf16 __u;
  } __attribute__((__packed__, __may_alias__));
  ((struct __mm_store_sbh_struct *)__dp)->__u = __a[0];
}

static __inline__ void __DEFAULT_FN_ATTRS128 _mm_mask_store_sbh(void *__W,
                                                                __mmask8 __U,
                                                                __m128bh __A) {
  __builtin_ia32_storesbf16128_mask((__v8bf *)__W, __A, __U & 1);
}

static __inline__ void __DEFAULT_FN_ATTRS256 _mm256_store_pbh(void *__P,
                                                              __m256bh __A) {
  *(__m256bh *)__P = __A;
}

static __inline__ void __DEFAULT_FN_ATTRS128 _mm_store_pbh(void *__P,
                                                           __m128bh __A) {
  *(__m128bh *)__P = __A;
}

static __inline__ void __DEFAULT_FN_ATTRS256 _mm256_storeu_pbh(void *__P,
                                                               __m256bh __A) {
  struct __storeu_pbh {
    __m256bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __storeu_pbh *)__P)->__v = __A;
}

static __inline__ void __DEFAULT_FN_ATTRS128 _mm_storeu_pbh(void *__P,
                                                            __m128bh __A) {
  struct __storeu_pbh {
    __m128bh_u __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __storeu_pbh *)__P)->__v = __A;
}

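/* Usage sketch (illustrative; hypothetical helper): pairing the unaligned
 * load and store gives a 32-byte copy of sixteen bf16 values with no
 * alignment requirement on either pointer, unlike _mm256_load_pbh and
 * _mm256_store_pbh above. */
static __inline__ void __DEFAULT_FN_ATTRS256
__example_copy16_pbh(void *__dst, const void *__src) {
  _mm256_storeu_pbh(__dst, _mm256_loadu_pbh(__src));
}
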
static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_move_sbh(__m128bh __a,
                                                              __m128bh __b) {
  __a[0] = __b[0];
  return __a;
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_move_sbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return __builtin_ia32_selectsbf_128(__U, _mm_move_sbh(__A, __B), __W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_move_sbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return __builtin_ia32_selectsbf_128(__U, _mm_move_sbh(__A, __B),
                                      _mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_undefined_pbh(void) {
  return (__m128bh)__builtin_ia32_undef128();
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_set_sbh(__bf16 bf) {
  return (__v8bf)__builtin_shufflevector(
      (__v8bf){bf, bf, bf, bf, bf, bf, bf, bf}, (__v8bf)_mm_setzero_pbh(), 0, 8,
      8, 8, 8, 8, 8, 8);
}

static __inline __m128bh __DEFAULT_FN_ATTRS128 _mm_set1_pbh(__bf16 bf) {
  return (__m128bh)(__v8bf){bf, bf, bf, bf, bf, bf, bf, bf};
}

static __inline __m256bh __DEFAULT_FN_ATTRS256 _mm256_set1_pbh(__bf16 bf) {
  return (__m256bh)(__v16bf){bf, bf, bf, bf, bf, bf, bf, bf,
                             bf, bf, bf, bf, bf, bf, bf, bf};
}

static __inline __m128bh __DEFAULT_FN_ATTRS128
_mm_set_pbh(__bf16 bf1, __bf16 bf2, __bf16 bf3, __bf16 bf4, __bf16 bf5,
            __bf16 bf6, __bf16 bf7, __bf16 bf8) {
  return (__m128bh)(__v8bf){bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8};
}

static __inline __m256bh __DEFAULT_FN_ATTRS256 _mm256_set_pbh(
    __bf16 bf1, __bf16 bf2, __bf16 bf3, __bf16 bf4, __bf16 bf5, __bf16 bf6,
    __bf16 bf7, __bf16 bf8, __bf16 bf9, __bf16 bf10, __bf16 bf11, __bf16 bf12,
    __bf16 bf13, __bf16 bf14, __bf16 bf15, __bf16 bf16) {
  return (__m256bh)(__v16bf){bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8,
                             bf9, bf10, bf11, bf12, bf13, bf14, bf15, bf16};
}

#define _mm_setr_pbh(bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8) \
  _mm_set_pbh((bf8), (bf7), (bf6), (bf5), (bf4), (bf3), (bf2), (bf1))

#define _mm256_setr_pbh(bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8, bf9, bf10, \
                        bf11, bf12, bf13, bf14, bf15, bf16) \
  _mm256_set_pbh((bf16), (bf15), (bf14), (bf13), (bf12), (bf11), (bf10), \
                 (bf9), (bf8), (bf7), (bf6), (bf5), (bf4), (bf3), (bf2), \
                 (bf1))

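/* Usage sketch (illustrative; hypothetical helper): _mm_set_pbh takes the
 * highest lane first, while the _mm_setr_pbh wrapper above takes its
 * arguments in memory order, so lane 0 of this vector holds 0.0. */
static __inline__ __m128bh __DEFAULT_FN_ATTRS128 __example_iota_pbh(void) {
  return _mm_setr_pbh((__bf16)0.0f, (__bf16)1.0f, (__bf16)2.0f, (__bf16)3.0f,
                      (__bf16)4.0f, (__bf16)5.0f, (__bf16)6.0f, (__bf16)7.0f);
}
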
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_abs_pbh(__m256bh __A) {
  return (__m256bh)_mm256_and_epi32(_mm256_set1_epi32(0x7FFF7FFF),
                                    (__m256i)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_abs_pbh(__m128bh __A) {
  return (__m128bh)_mm_and_epi32(_mm_set1_epi32(0x7FFF7FFF), (__m128i)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_blend_pbh(__mmask8 __U, __m128bh __A, __m128bh __W) {
  return (__m128bh)__builtin_ia32_selectpbf_128((__mmask8)__U, (__v8bf)__W,
                                                (__v8bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_blend_pbh(__mmask16 __U, __m256bh __A, __m256bh __W) {
  return (__m256bh)__builtin_ia32_selectpbf_256((__mmask16)__U, (__v16bf)__W,
                                                (__v16bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_permutex2var_pbh(__m128bh __A, __m128i __I, __m128bh __B) {
  return (__m128bh)__builtin_ia32_vpermi2varhi128((__v8hi)__A, (__v8hi)__I,
                                                  (__v8hi)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_permutex2var_pbh(__m256bh __A, __m256i __I, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vpermi2varhi256((__v16hi)__A, (__v16hi)__I,
                                                  (__v16hi)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_permutexvar_pbh(__m128i __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_permvarhi128((__v8hi)__B, (__v8hi)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_permutexvar_pbh(__m256i __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_permvarhi256((__v16hi)__B, (__v16hi)__A);
}

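/* Usage sketch (illustrative; hypothetical helper): _mm_permutexvar_pbh reads
 * one 16-bit lane index per destination lane from its first argument, so an
 * index vector of 7..0 reverses the eight elements. */
static __inline__ __m128bh __DEFAULT_FN_ATTRS128
__example_reverse_pbh(__m128bh __V) {
  return _mm_permutexvar_pbh(_mm_setr_epi16(7, 6, 5, 4, 3, 2, 1, 0), __V);
}
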
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_addne_pbh(__m256bh __A, __m256bh __B) {
  return (__m256bh)((__v16bf)__A + (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_addne_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_addne_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_addne_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_addne_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_addne_pbh(__m128bh __A,
                                                               __m128bh __B) {
  return (__m128bh)((__v8bf)__A + (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_addne_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_addne_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_addne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128((__mmask8)__U,
                                                (__v8bf)_mm_addne_pbh(__A, __B),
                                                (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_subne_pbh(__m256bh __A, __m256bh __B) {
  return (__m256bh)((__v16bf)__A - (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_subne_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_subne_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_subne_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_subne_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_subne_pbh(__m128bh __A,
                                                               __m128bh __B) {
  return (__m128bh)((__v8bf)__A - (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_subne_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_subne_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_subne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128((__mmask8)__U,
                                                (__v8bf)_mm_subne_pbh(__A, __B),
                                                (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mulne_pbh(__m256bh __A, __m256bh __B) {
  return (__m256bh)((__v16bf)__A * (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_mulne_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_mulne_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_mulne_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_mulne_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_mulne_pbh(__m128bh __A,
                                                               __m128bh __B) {
  return (__m128bh)((__v8bf)__A * (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_mulne_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_mulne_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_mulne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128((__mmask8)__U,
                                                (__v8bf)_mm_mulne_pbh(__A, __B),
                                                (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_divne_pbh(__m256bh __A, __m256bh __B) {
  return (__m256bh)((__v16bf)__A / (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_divne_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_divne_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_divne_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_divne_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_divne_pbh(__m128bh __A,
                                                               __m128bh __B) {
  return (__m128bh)((__v8bf)__A / (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_divne_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_divne_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_divne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128((__mmask8)__U,
                                                (__v8bf)_mm_divne_pbh(__A, __B),
                                                (__v8bf)_mm_setzero_pbh());
}

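/* Usage sketch (illustrative; hypothetical helper): the mask_/maskz_ variants
 * above follow the usual AVX-512 convention: mask_ merges masked-off lanes
 * from the first operand, maskz_ zeroes them. Here masked-off lanes of the
 * accumulator pass through unchanged. */
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
__example_masked_add_pbh(__m256bh __Acc, __mmask16 __Keep, __m256bh __X) {
  return _mm256_mask_addne_pbh(__Acc, __Keep, __Acc, __X);
}
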
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_max_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)__builtin_ia32_vmaxpbf16256((__v16bf)__A, (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_max_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_max_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_max_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_max_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_max_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)__builtin_ia32_vmaxpbf16128((__v8bf)__A, (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_max_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_max_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_max_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_max_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_min_pbh(__m256bh __A,
                                                                __m256bh __B) {
  return (__m256bh)__builtin_ia32_vminpbf16256((__v16bf)__A, (__v16bf)__B);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_min_pbh(__m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_min_pbh(__A, __B), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_min_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_min_pbh(__A, __B),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_min_pbh(__m128bh __A,
                                                             __m128bh __B) {
  return (__m128bh)__builtin_ia32_vminpbf16128((__v8bf)__A, (__v8bf)__B);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_min_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_min_pbh(__A, __B), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_min_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_min_pbh(__A, __B), (__v8bf)_mm_setzero_pbh());
}

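/* Usage sketch (illustrative; hypothetical helper): composing min and max
 * clamps every lane into [__Lo, __Hi]. */
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
__example_clamp_pbh(__m256bh __V, __m256bh __Lo, __m256bh __Hi) {
  return _mm256_min_pbh(_mm256_max_pbh(__V, __Lo), __Hi);
}
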
static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comeqsbh(__m128bh A,
                                                         __m128bh B) {
  return __builtin_ia32_vcomsbf16eq((__v8bf)A, (__v8bf)B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comltsbh(__m128bh A,
                                                         __m128bh B) {
  return __builtin_ia32_vcomsbf16lt((__v8bf)A, (__v8bf)B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comlesbh(__m128bh A,
                                                         __m128bh B) {
  return __builtin_ia32_vcomsbf16le((__v8bf)A, (__v8bf)B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comgtsbh(__m128bh A,
                                                         __m128bh B) {
  return __builtin_ia32_vcomsbf16gt((__v8bf)A, (__v8bf)B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comgesbh(__m128bh A,
                                                         __m128bh B) {
  return __builtin_ia32_vcomsbf16ge((__v8bf)A, (__v8bf)B);
}

static __inline__ int __DEFAULT_FN_ATTRS128 _mm_comneqsbh(__m128bh A,
                                                          __m128bh B) {
  return __builtin_ia32_vcomsbf16neq((__v8bf)A, (__v8bf)B);
}

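/* Usage sketch (illustrative; hypothetical helper): the scalar comparisons
 * above test lane 0 and return an int, so they slot directly into branches
 * and conditional expressions. */
static __inline__ __bf16 __DEFAULT_FN_ATTRS128 __example_min_sbh(__m128bh __A,
                                                                 __m128bh __B) {
  return _mm_comltsbh(__A, __B) ? _mm_cvtsbh_bf16(__A) : _mm_cvtsbh_bf16(__B);
}
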
#define _mm256_cmp_pbh_mask(__A, __B, __P) \
  ((__mmask16)__builtin_ia32_vcmppbf16256_mask((__v16bf)(__m256bh)(__A), \
                                               (__v16bf)(__m256bh)(__B), \
                                               (int)(__P), (__mmask16)-1))

#define _mm256_mask_cmp_pbh_mask(__U, __A, __B, __P) \
  ((__mmask16)__builtin_ia32_vcmppbf16256_mask((__v16bf)(__m256bh)(__A), \
                                               (__v16bf)(__m256bh)(__B), \
                                               (int)(__P), (__mmask16)(__U)))

#define _mm_cmp_pbh_mask(__A, __B, __P) \
  ((__mmask8)__builtin_ia32_vcmppbf16128_mask((__v8bf)(__m128bh)(__A), \
                                              (__v8bf)(__m128bh)(__B), \
                                              (int)(__P), (__mmask8)-1))

#define _mm_mask_cmp_pbh_mask(__U, __A, __B, __P) \
  ((__mmask8)__builtin_ia32_vcmppbf16128_mask((__v8bf)(__m128bh)(__A), \
                                              (__v8bf)(__m128bh)(__B), \
                                              (int)(__P), (__mmask8)(__U)))

#define _mm256_mask_fpclass_pbh_mask(__U, __A, imm) \
  ((__mmask16)__builtin_ia32_vfpclasspbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__mmask16)(__U)))

#define _mm256_fpclass_pbh_mask(__A, imm) \
  ((__mmask16)__builtin_ia32_vfpclasspbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__mmask16)-1))

#define _mm_mask_fpclass_pbh_mask(__U, __A, imm) \
  ((__mmask8)__builtin_ia32_vfpclasspbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__mmask8)(__U)))

#define _mm_fpclass_pbh_mask(__A, imm) \
  ((__mmask8)__builtin_ia32_vfpclasspbf16128_mask((__v8bf)(__m128bh)(__A), \
                                                  (int)(imm), (__mmask8)-1))

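/* Usage sketch (illustrative; hypothetical helper): the predicate argument
 * takes the _CMP_* encodings from <avxintrin.h>; the result is a bitmask
 * with one bit per lane, so counting set bits counts matching lanes. */
static __inline__ int __DEFAULT_FN_ATTRS256
__example_count_gt_pbh(__m256bh __V, __m256bh __Threshold) {
  __mmask16 __m = _mm256_cmp_pbh_mask(__V, __Threshold, _CMP_GT_OQ);
  return __builtin_popcount((unsigned int)__m);
}
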
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_scalef_pbh(__m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vscalefpbf16256_mask(
      (__v16bf)__A, (__v16bf)__B, (__v16bf)_mm256_undefined_pbh(),
      (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_scalef_pbh(
    __m256bh __W, __mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vscalefpbf16256_mask(
      (__v16bf)__A, (__v16bf)__B, (__v16bf)__W, (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_scalef_pbh(__mmask16 __U, __m256bh __A, __m256bh __B) {
  return (__m256bh)__builtin_ia32_vscalefpbf16256_mask(
      (__v16bf)__A, (__v16bf)__B, (__v16bf)_mm256_setzero_pbh(),
      (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_scalef_pbh(__m128bh __A,
                                                                __m128bh __B) {
  return (__m128bh)__builtin_ia32_vscalefpbf16128_mask(
      (__v8bf)__A, (__v8bf)__B, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_scalef_pbh(__m128bh __W, __mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_vscalefpbf16128_mask(
      (__v8bf)__A, (__v8bf)__B, (__v8bf)__W, (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_scalef_pbh(__mmask8 __U, __m128bh __A, __m128bh __B) {
  return (__m128bh)__builtin_ia32_vscalefpbf16128_mask(
      (__v8bf)__A, (__v8bf)__B, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

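/* Usage sketch (illustrative; hypothetical helper, assuming VSCALEF semantics
 * carry over from AVX-512): each lane computes __X * 2^floor(__Exp), a vector
 * analogue of ldexpf. */
static __inline__ __m128bh __DEFAULT_FN_ATTRS128
__example_ldexp_pbh(__m128bh __X, __m128bh __Exp) {
  return _mm_scalef_pbh(__X, __Exp);
}
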
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_rcp_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vrcppbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_rcp_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrcppbf16256_mask((__v16bf)__A, (__v16bf)__W,
                                                    (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_rcp_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrcppbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_setzero_pbh(), (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_rcp_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vrcppbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_rcp_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrcppbf16128_mask((__v8bf)__A, (__v8bf)__W,
                                                    (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_rcp_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrcppbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_getexp_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vgetexppbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_getexp_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vgetexppbf16256_mask(
      (__v16bf)__A, (__v16bf)__W, (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_getexp_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vgetexppbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_setzero_pbh(), (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_getexp_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vgetexppbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_getexp_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vgetexppbf16128_mask((__v8bf)__A, (__v8bf)__W,
                                                       (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_getexp_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vgetexppbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_rsqrt_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vrsqrtpbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_rsqrt_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrsqrtpbf16256_mask(
      (__v16bf)__A, (__v16bf)__W, (__mmask16)__U);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_rsqrt_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_vrsqrtpbf16256_mask(
      (__v16bf)__A, (__v16bf)_mm256_setzero_pbh(), (__mmask16)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_rsqrt_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vrsqrtpbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_undefined_pbh(), (__mmask8)-1);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_rsqrt_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrsqrtpbf16128_mask((__v8bf)__A, (__v8bf)__W,
                                                      (__mmask8)__U);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_rsqrt_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_vrsqrtpbf16128_mask(
      (__v8bf)__A, (__v8bf)_mm_setzero_pbh(), (__mmask8)__U);
}

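/* Usage sketch (illustrative; hypothetical helper): approximating
 * __X / sqrt(__SumSq) with the reciprocal square root. The bf16 approximation
 * is coarse, so accuracy-sensitive code should widen to float instead. */
static __inline__ __m128bh __DEFAULT_FN_ATTRS128
__example_fast_norm_pbh(__m128bh __X, __m128bh __SumSq) {
  return _mm_mulne_pbh(__X, _mm_rsqrt_pbh(__SumSq));
}
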
#define _mm256_reducene_pbh(__A, imm) \
  ((__m256bh)__builtin_ia32_vreducenepbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_undefined_pbh(), \
      (__mmask16)-1))

#define _mm256_mask_reducene_pbh(__W, __U, __A, imm) \
  ((__m256bh)__builtin_ia32_vreducenepbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)(__m256bh)(__W), \
      (__mmask16)(__U)))

#define _mm256_maskz_reducene_pbh(__U, __A, imm) \
  ((__m256bh)__builtin_ia32_vreducenepbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(), \
      (__mmask16)(__U)))

#define _mm_reducene_pbh(__A, imm) \
  ((__m128bh)__builtin_ia32_vreducenepbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_undefined_pbh(), \
      (__mmask8)-1))

#define _mm_mask_reducene_pbh(__W, __U, __A, imm) \
  ((__m128bh)__builtin_ia32_vreducenepbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)(__m128bh)(__W), \
      (__mmask8)(__U)))

#define _mm_maskz_reducene_pbh(__U, __A, imm) \
  ((__m128bh)__builtin_ia32_vreducenepbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(), \
      (__mmask8)(__U)))

#define _mm256_roundscalene_pbh(__A, imm) \
  ((__m256bh)__builtin_ia32_vrndscalenepbf16_256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(), \
      (__mmask16)-1))

#define _mm256_mask_roundscalene_pbh(__W, __U, __A, imm) \
  ((__m256bh)__builtin_ia32_vrndscalenepbf16_256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)(__m256bh)(__W), \
      (__mmask16)(__U)))

#define _mm256_maskz_roundscalene_pbh(__U, __A, imm) \
  ((__m256bh)__builtin_ia32_vrndscalenepbf16_256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(), \
      (__mmask16)(__U)))

#define _mm_roundscalene_pbh(__A, imm) \
  ((__m128bh)__builtin_ia32_vrndscalenepbf16_128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(), \
      (__mmask8)-1))

#define _mm_mask_roundscalene_pbh(__W, __U, __A, imm) \
  ((__m128bh)__builtin_ia32_vrndscalenepbf16_128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)(__m128bh)(__W), \
      (__mmask8)(__U)))

#define _mm_maskz_roundscalene_pbh(__U, __A, imm) \
  ((__m128bh)__builtin_ia32_vrndscalenepbf16_128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(), \
      (__mmask8)(__U)))

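/* Usage sketch (illustrative; hypothetical helper, assuming the immediate
 * follows the AVX-512 VRNDSCALE encoding: bits 7:4 select the scale M and
 * bits 1:0 the rounding mode, rounding to multiples of 2^-M): an immediate
 * of 0 rounds every lane to the nearest integer. */
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
__example_round_pbh(__m256bh __V) {
  return _mm256_roundscalene_pbh(__V, 0);
}
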
#define _mm256_getmant_pbh(__A, __B, __C) \
  ((__m256bh)__builtin_ia32_vgetmantpbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v16bf)_mm256_undefined_pbh(), (__mmask16)-1))

#define _mm256_mask_getmant_pbh(__W, __U, __A, __B, __C) \
  ((__m256bh)__builtin_ia32_vgetmantpbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v16bf)(__m256bh)(__W), (__mmask16)(__U)))

#define _mm256_maskz_getmant_pbh(__U, __A, __B, __C) \
  ((__m256bh)__builtin_ia32_vgetmantpbf16256_mask( \
      (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v16bf)_mm256_setzero_pbh(), (__mmask16)(__U)))

#define _mm_getmant_pbh(__A, __B, __C) \
  ((__m128bh)__builtin_ia32_vgetmantpbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v8bf)_mm_undefined_pbh(), (__mmask8)-1))

#define _mm_mask_getmant_pbh(__W, __U, __A, __B, __C) \
  ((__m128bh)__builtin_ia32_vgetmantpbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v8bf)(__m128bh)(__W), (__mmask8)(__U)))

#define _mm_maskz_getmant_pbh(__U, __A, __B, __C) \
  ((__m128bh)__builtin_ia32_vgetmantpbf16128_mask( \
      (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
      (__v8bf)_mm_setzero_pbh(), (__mmask8)(__U)))

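/* Usage sketch (illustrative; hypothetical helper): __B selects the
 * normalization interval and __C the sign treatment, matching the
 * _MM_MANT_NORM_* and _MM_MANT_SIGN_* enums from <avx512fintrin.h>. This
 * extracts each lane's mantissa normalized to [1, 2), keeping the source
 * sign. */
static __inline__ __m128bh __DEFAULT_FN_ATTRS128
__example_mantissa_pbh(__m128bh __V) {
  return _mm_getmant_pbh(__V, _MM_MANT_NORM_1_2, _MM_MANT_SIGN_src);
}
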
static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_sqrt_pbh(__m256bh __A) {
  return (__m256bh)__builtin_ia32_vsqrtnepbf16256((__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_sqrt_pbh(__m256bh __W, __mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U, (__v16bf)_mm256_sqrt_pbh(__A), (__v16bf)__W);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_sqrt_pbh(__mmask16 __U, __m256bh __A) {
  return (__m256bh)__builtin_ia32_selectpbf_256((__mmask16)__U,
                                                (__v16bf)_mm256_sqrt_pbh(__A),
                                                (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_sqrt_pbh(__m128bh __A) {
  return (__m128bh)__builtin_ia32_vsqrtnepbf16((__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_sqrt_pbh(__m128bh __W, __mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_sqrt_pbh(__A), (__v8bf)__W);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_sqrt_pbh(__mmask8 __U, __m128bh __A) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, (__v8bf)_mm_sqrt_pbh(__A), (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fmaddne_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_vfmaddnepbh256((__v16bf)__A, (__v16bf)__B,
                                                 (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_fmaddne_pbh(
    __m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmaddne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fmaddne_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmaddne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fmaddne_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmaddne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fmsubne_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_vfmaddnepbh256((__v16bf)__A, (__v16bf)__B,
                                                 -(__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_fmsubne_pbh(
    __m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmsubne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fmsubne_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmsubne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fmsubne_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fmsubne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fnmaddne_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_vfmaddnepbh256((__v16bf)__A, -(__v16bf)__B,
                                                 (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_fnmaddne_pbh(
    __m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmaddne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fnmaddne_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmaddne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fnmaddne_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmaddne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_fnmsubne_pbh(__m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_vfmaddnepbh256((__v16bf)__A, -(__v16bf)__B,
                                                 -(__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask_fnmsubne_pbh(
    __m256bh __A, __mmask16 __U, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmsubne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__A);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_mask3_fnmsubne_pbh(
    __m256bh __A, __m256bh __B, __m256bh __C, __mmask16 __U) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmsubne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)__C);
}

static __inline__ __m256bh __DEFAULT_FN_ATTRS256 _mm256_maskz_fnmsubne_pbh(
    __mmask16 __U, __m256bh __A, __m256bh __B, __m256bh __C) {
  return (__m256bh)__builtin_ia32_selectpbf_256(
      (__mmask16)__U,
      _mm256_fnmsubne_pbh((__v16bf)__A, (__v16bf)__B, (__v16bf)__C),
      (__v16bf)_mm256_setzero_pbh());
}

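/* Usage sketch (illustrative; hypothetical helper): a linear interpolation
 * __A + __T * (__B - __A), evaluated with a single fused multiply-add so the
 * product is not rounded separately. */
static __inline__ __m256bh __DEFAULT_FN_ATTRS256
__example_lerp_pbh(__m256bh __A, __m256bh __B, __m256bh __T) {
  return _mm256_fmaddne_pbh(__T, _mm256_subne_pbh(__B, __A), __A);
}
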
static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_fmaddne_pbh(__m128bh __A,
                                                                 __m128bh __B,
                                                                 __m128bh __C) {
  return (__m128bh)__builtin_ia32_vfmaddnepbh128((__v8bf)__A, (__v8bf)__B,
                                                 (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fmaddne_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmaddne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fmaddne_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmaddne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fmaddne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmaddne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128 _mm_fmsubne_pbh(__m128bh __A,
                                                                 __m128bh __B,
                                                                 __m128bh __C) {
  return (__m128bh)__builtin_ia32_vfmaddnepbh128((__v8bf)__A, (__v8bf)__B,
                                                 -(__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fmsubne_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmsubne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fmsubne_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmsubne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fmsubne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fmsubne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_fnmaddne_pbh(__m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_vfmaddnepbh128((__v8bf)__A, -(__v8bf)__B,
                                                 (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fnmaddne_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmaddne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fnmaddne_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmaddne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fnmaddne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmaddne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_fnmsubne_pbh(__m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_vfmaddnepbh128((__v8bf)__A, -(__v8bf)__B,
                                                 -(__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask_fnmsubne_pbh(__m128bh __A, __mmask8 __U, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmsubne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__A);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mask3_fnmsubne_pbh(__m128bh __A, __m128bh __B, __m128bh __C, __mmask8 __U) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmsubne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)__C);
}

static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_maskz_fnmsubne_pbh(__mmask8 __U, __m128bh __A, __m128bh __B, __m128bh __C) {
  return (__m128bh)__builtin_ia32_selectpbf_128(
      (__mmask8)__U, _mm_fnmsubne_pbh((__v8bf)__A, (__v8bf)__B, (__v8bf)__C),
      (__v8bf)_mm_setzero_pbh());
}

#undef __DEFAULT_FN_ATTRS128
#undef __DEFAULT_FN_ATTRS256

#endif
#endif