/*===--------- avx10_2_512convertintrin.h - AVX10_2_512CONVERT -------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error \
    "Never use <avx10_2_512convertintrin.h> directly; include <immintrin.h> instead."
#endif // __IMMINTRIN_H

#ifdef __SSE2__

#ifndef __AVX10_2_512CONVERTINTRIN_H
#define __AVX10_2_512CONVERTINTRIN_H

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS512                                                  \
  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2"),       \
                 __min_vector_width__(512)))

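/* Convert the two vectors of packed single-precision values __A and __B into
 * a single vector of 32 packed FP16 values (vcvt2ps2phx), using the current
 * rounding direction. */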
static __inline__ __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtx2ps_ph(__m512 __A,
                                                                   __m512 __B) {
  return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
      (__v16sf)__A, (__v16sf)__B, (__v32hf)_mm512_setzero_ph(), (__mmask32)(-1),
      _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512h __DEFAULT_FN_ATTRS512
_mm512_mask_cvtx2ps_ph(__m512h __W, __mmask32 __U, __m512 __A, __m512 __B) {
  return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
      (__v16sf)__A, (__v16sf)__B, (__v32hf)__W, (__mmask32)__U,
      _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512h __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtx2ps_ph(__mmask32 __U, __m512 __A, __m512 __B) {
  return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
      (__v16sf)__A, (__v16sf)__B, (__v32hf)_mm512_setzero_ph(), (__mmask32)__U,
      _MM_FROUND_CUR_DIRECTION);
}

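/* Variants of the conversion above that take an explicit rounding-control
 * immediate R (e.g. _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC) instead of
 * the current rounding direction. */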
#define _mm512_cvtx_round2ps_ph(A, B, R)                                       \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask(                                \
      (__v16sf)(A), (__v16sf)(B), (__v32hf)_mm512_undefined_ph(),              \
      (__mmask32)(-1), (const int)(R)))

#define _mm512_mask_cvtx_round2ps_ph(W, U, A, B, R)                            \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask((__v16sf)(A), (__v16sf)(B),     \
                                               (__v32hf)(W), (__mmask32)(U),   \
                                               (const int)(R)))

#define _mm512_maskz_cvtx_round2ps_ph(U, A, B, R)                              \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask(                                \
      (__v16sf)(A), (__v16sf)(B), (__v32hf)_mm512_setzero_ph(),                \
      (__mmask32)(U), (const int)(R)))

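/* Convert the 32 packed FP16 values in __B to packed BF8 (E5M2) values, using
 * per-element bias terms supplied in __A; the _cvts_ variants convert with
 * saturation. */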
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvtbiasph_bf8(__m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
      (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_bf8(
    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtbiasph_bf8(__mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
      (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvts_biasph_bf8(__m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
      (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvts_biasph_bf8(
    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvts_biasph_bf8(__mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
      (__mmask32)__U);
}

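/* Convert the 32 packed FP16 values in __B to packed HF8 (E4M3) values, using
 * per-element bias terms supplied in __A; the _cvts_ variants convert with
 * saturation. */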
static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvtbiasph_hf8(__m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
      (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_hf8(
    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtbiasph_hf8(__mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
      (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvts_biasph_hf8(__m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
      (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvts_biasph_hf8(
    __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvts_biasph_hf8(__mmask32 __U, __m512i __A, __m512h __B) {
  return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
      (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
      (__mmask32)__U);
}

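/* Convert the two vectors of 32 packed FP16 values __A and __B into a single
 * vector of 64 packed BF8 (E5M2) values; the _cvts_ variants convert with
 * saturation. */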
static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_cvt2ph_bf8(__m512h __A,
                                                                  __m512h __B) {
  return (__m512i)__builtin_ia32_vcvt2ph2bf8_512((__v32hf)(__A),
                                                 (__v32hf)(__B));
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_mask_cvt2ph_bf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvt2ph_bf8(__A, __B), (__v64qi)__W);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvt2ph_bf8(__mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvt2ph_bf8(__A, __B),
      (__v64qi)(__m512i)_mm512_setzero_si512());
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvts_2ph_bf8(__m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_vcvt2ph2bf8s_512((__v32hf)(__A),
                                                  (__v32hf)(__B));
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_mask_cvts_2ph_bf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvts_2ph_bf8(__A, __B), (__v64qi)__W);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvts_2ph_bf8(__mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvts_2ph_bf8(__A, __B),
      (__v64qi)(__m512i)_mm512_setzero_si512());
}

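/* Convert the two vectors of 32 packed FP16 values __A and __B into a single
 * vector of 64 packed HF8 (E4M3) values; the _cvts_ variants convert with
 * saturation. */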
static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_cvt2ph_hf8(__m512h __A,
                                                                  __m512h __B) {
  return (__m512i)__builtin_ia32_vcvt2ph2hf8_512((__v32hf)(__A),
                                                 (__v32hf)(__B));
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_mask_cvt2ph_hf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvt2ph_hf8(__A, __B), (__v64qi)__W);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvt2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvt2ph_hf8(__A, __B),
      (__v64qi)(__m512i)_mm512_setzero_si512());
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_cvts_2ph_hf8(__m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_vcvt2ph2hf8s_512((__v32hf)(__A),
                                                  (__v32hf)(__B));
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_mask_cvts_2ph_hf8(__m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvts_2ph_hf8(__A, __B), (__v64qi)__W);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvts_2ph_hf8(__mmask64 __U, __m512h __A, __m512h __B) {
  return (__m512i)__builtin_ia32_selectb_512(
      (__mmask64)__U, (__v64qi)_mm512_cvts_2ph_hf8(__A, __B),
      (__v64qi)(__m512i)_mm512_setzero_si512());
}

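/* Widen the 32 packed HF8 (E4M3) values in __A to 32 packed FP16 values. */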
static __inline__ __m512h __DEFAULT_FN_ATTRS512 _mm512_cvthf8_ph(__m256i __A) {
  return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
      (__v32qi)__A, (__v32hf)(__m512h)_mm512_undefined_ph(), (__mmask32)-1);
}

static __inline__ __m512h __DEFAULT_FN_ATTRS512
_mm512_mask_cvthf8_ph(__m512h __W, __mmask32 __U, __m256i __A) {
  return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
      (__v32qi)__A, (__v32hf)(__m512h)__W, (__mmask32)__U);
}

static __inline__ __m512h __DEFAULT_FN_ATTRS512
_mm512_maskz_cvthf8_ph(__mmask32 __U, __m256i __A) {
  return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
      (__v32qi)__A, (__v32hf)(__m512h)_mm512_setzero_ph(), (__mmask32)__U);
}

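/* Narrow the 32 packed FP16 values in __A to 32 packed BF8 (E5M2) values; the
 * _cvts_ variants convert with saturation. */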
static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_cvtph_bf8(__m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvtph_bf8(__m256i __W, __mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtph_bf8(__mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2bf8_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvts_ph_bf8(__m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvts_ph_bf8(__m256i __W, __mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvts_ph_bf8(__mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2bf8s_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}

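/* Narrow the 32 packed FP16 values in __A to 32 packed HF8 (E4M3) values; the
 * _cvts_ variants convert with saturation. */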
static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_cvtph_hf8(__m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvtph_hf8(__m256i __W, __mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtph_hf8(__mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2hf8_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_cvts_ph_hf8(__m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_mask_cvts_ph_hf8(__m256i __W, __mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS512
_mm512_maskz_cvts_ph_hf8(__mmask32 __U, __m512h __A) {
  return (__m256i)__builtin_ia32_vcvtph2hf8s_512_mask(
      (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
}

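/* Widen the 32 packed BF8 (E5M2) values in __A to 32 packed FP16 values.
 * BF8 has the same sign and exponent layout as the top byte of an FP16 value,
 * so the widening simply places each byte into the high byte of a 16-bit lane
 * via a widen followed by a left shift of 8. */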
static __inline __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtbf8_ph(__m256i __A) {
  return _mm512_castsi512_ph(_mm512_slli_epi16(_mm512_cvtepi8_epi16(__A), 8));
}

static __inline __m512h __DEFAULT_FN_ATTRS512
_mm512_mask_cvtbf8_ph(__m512h __S, __mmask32 __U, __m256i __A) {
  return _mm512_castsi512_ph(
      _mm512_mask_slli_epi16((__m512i)__S, __U, _mm512_cvtepi8_epi16(__A), 8));
}

static __inline __m512h __DEFAULT_FN_ATTRS512
_mm512_maskz_cvtbf8_ph(__mmask32 __U, __m256i __A) {
  return _mm512_castsi512_ph(
      _mm512_slli_epi16(_mm512_maskz_cvtepi8_epi16(__U, __A), 8));
}

#undef __DEFAULT_FN_ATTRS512

#endif // __AVX10_2_512CONVERTINTRIN_H
#endif // __SSE2__