clang 22.0.0git
intrin.h
Go to the documentation of this file.
1/* ===-------- intrin.h ---------------------------------------------------===
2 *
3 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 * See https://llvm.org/LICENSE.txt for license information.
5 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 *
7 *===-----------------------------------------------------------------------===
8 */
9
10/* Only include this if we're compiling for the windows platform. */
11#ifndef _MSC_VER
12#include_next <intrin.h>
13#else
14
15#ifndef __INTRIN_H
16#define __INTRIN_H
17
18#include <intrin0.h>
19
20/* First include the standard intrinsics. */
21#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
22#include <x86intrin.h>
23#endif
24
25#if defined(__arm__)
26#include <armintr.h>
27#endif
28
29#if defined(__aarch64__) || defined(__arm64ec__)
30#include <arm64intr.h>
31#endif
32
33#if defined(__ARM_ACLE)
34#include <arm_acle.h>
35#endif
36
37/* For the definition of jmp_buf. */
38#if __STDC_HOSTED__
39#include <setjmp.h>
40#endif
41
42/* Define the default attributes for the functions in this file. */
43#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))
44
45#if __x86_64__
46#define __LPTRINT_TYPE__ __int64
47#else
48#define __LPTRINT_TYPE__ long
49#endif
50
51#ifdef __cplusplus
52extern "C" {
53#endif
54
55#if defined(__MMX__)
56/* And the random ones that aren't in those files. */
57__m64 _m_from_float(float);
58float _m_to_float(__m64);
59#endif
60
61/* Other assorted instruction intrinsics. */
62void __addfsbyte(unsigned long, unsigned char);
63void __addfsdword(unsigned long, unsigned long);
64void __addfsword(unsigned long, unsigned short);
65void __code_seg(const char *);
66void __cpuid(int[4], int);
67void __cpuidex(int[4], int, int);
68__int64 __emul(int, int);
69unsigned __int64 __emulu(unsigned int, unsigned int);
70unsigned int __getcallerseflags(void);
71void __halt(void);
72unsigned char __inbyte(unsigned short);
73void __inbytestring(unsigned short, unsigned char *, unsigned long);
74void __incfsbyte(unsigned long);
75void __incfsdword(unsigned long);
76void __incfsword(unsigned long);
77unsigned long __indword(unsigned short);
78void __indwordstring(unsigned short, unsigned long *, unsigned long);
79void __int2c(void);
80void __invlpg(void *);
81unsigned short __inword(unsigned short);
82void __inwordstring(unsigned short, unsigned short *, unsigned long);
83void __lidt(void *);
84unsigned __int64 __ll_lshift(unsigned __int64, int);
85__int64 __ll_rshift(__int64, int);
86void __movsb(unsigned char *, unsigned char const *, size_t);
87void __movsd(unsigned long *, unsigned long const *, size_t);
88void __movsw(unsigned short *, unsigned short const *, size_t);
89void __nop(void);
90void __nvreg_restore_fence(void);
91void __nvreg_save_fence(void);
92void __outbyte(unsigned short, unsigned char);
93void __outbytestring(unsigned short, unsigned char *, unsigned long);
94void __outdword(unsigned short, unsigned long);
95void __outdwordstring(unsigned short, unsigned long *, unsigned long);
96void __outword(unsigned short, unsigned short);
97void __outwordstring(unsigned short, unsigned short *, unsigned long);
98unsigned long __readcr0(void);
99unsigned long __readcr2(void);
100unsigned __LPTRINT_TYPE__ __readcr3(void);
101unsigned __LPTRINT_TYPE__ __readcr4(void);
102unsigned __int64 __readcr8(void);
103unsigned int __readdr(unsigned int);
104#ifdef __i386__
105unsigned char __readfsbyte(unsigned long);
106unsigned short __readfsword(unsigned long);
107unsigned long __readfsdword(unsigned long);
108unsigned __int64 __readfsqword(unsigned long);
109#endif
110unsigned __int64 __readmsr(unsigned long);
111unsigned __int64 __readpmc(unsigned long);
112unsigned long __segmentlimit(unsigned long);
113void __sidt(void *);
114void __stosb(unsigned char *, unsigned char, size_t);
115void __stosd(unsigned long *, unsigned long, size_t);
116void __stosw(unsigned short *, unsigned short, size_t);
117void __svm_clgi(void);
118void __svm_invlpga(void *, int);
119void __svm_skinit(int);
120void __svm_stgi(void);
121void __svm_vmload(size_t);
122void __svm_vmrun(size_t);
123void __svm_vmsave(size_t);
124void __ud2(void);
125unsigned __int64 __ull_rshift(unsigned __int64, int);
126void __vmx_off(void);
127void __vmx_vmptrst(unsigned __int64 *);
128void __wbinvd(void);
129void __writecr0(unsigned int);
130void __writecr3(unsigned __INTPTR_TYPE__);
131void __writecr4(unsigned __INTPTR_TYPE__);
132void __writecr8(unsigned __int64);
133void __writedr(unsigned int, unsigned int);
134void __writefsbyte(unsigned long, unsigned char);
135void __writefsdword(unsigned long, unsigned long);
136void __writefsqword(unsigned long, unsigned __int64);
137void __writefsword(unsigned long, unsigned short);
138void __writemsr(unsigned long, unsigned __int64);
139void *_AddressOfReturnAddress(void);
140unsigned char _bittest(long const *, long);
141unsigned char _bittestandcomplement(long *, long);
142unsigned char _bittestandreset(long *, long);
143unsigned char _bittestandset(long *, long);
144void __cdecl _disable(void);
145void __cdecl _enable(void);
146long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
147unsigned char _interlockedbittestandreset(long volatile *, long);
148unsigned char _interlockedbittestandset(long volatile *, long);
149void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
150 void *);
151void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
152 void *);
153long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
154long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
155__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
156__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
157void _ReadBarrier(void);
158unsigned int _rorx_u32(unsigned int, const unsigned int);
159int _sarx_i32(int, unsigned int);
160#if __STDC_HOSTED__
161int __cdecl _setjmp(jmp_buf);
162#endif
163unsigned int _shlx_u32(unsigned int, unsigned int);
164unsigned int _shrx_u32(unsigned int, unsigned int);
165void _Store_HLERelease(long volatile *, long);
166void _Store64_HLERelease(__int64 volatile *, __int64);
167void _StorePointer_HLERelease(void *volatile *, void *);
168void _WriteBarrier(void);
169
170/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
171#if defined(__x86_64__) && !defined(__arm64ec__)
172void __addgsbyte(unsigned long, unsigned char);
173void __addgsdword(unsigned long, unsigned long);
174void __addgsqword(unsigned long, unsigned __int64);
175void __addgsword(unsigned long, unsigned short);
176void __faststorefence(void);
177void __incgsbyte(unsigned long);
178void __incgsdword(unsigned long);
179void __incgsqword(unsigned long);
180void __incgsword(unsigned long);
181void __movsq(unsigned long long *, unsigned long long const *, size_t);
182unsigned char __readgsbyte(unsigned long);
183unsigned long __readgsdword(unsigned long);
184unsigned __int64 __readgsqword(unsigned long);
185unsigned short __readgsword(unsigned long);
186void __stosq(unsigned __int64 *, unsigned __int64, size_t);
187unsigned char __vmx_on(unsigned __int64 *);
188unsigned char __vmx_vmclear(unsigned __int64 *);
189unsigned char __vmx_vmlaunch(void);
190unsigned char __vmx_vmptrld(unsigned __int64 *);
191unsigned char __vmx_vmread(size_t, size_t *);
192unsigned char __vmx_vmresume(void);
193unsigned char __vmx_vmwrite(size_t, size_t);
194void __writegsbyte(unsigned long, unsigned char);
195void __writegsdword(unsigned long, unsigned long);
196void __writegsqword(unsigned long, unsigned __int64);
197void __writegsword(unsigned long, unsigned short);
198unsigned char _bittest64(__int64 const *, __int64);
199unsigned char _bittestandcomplement64(__int64 *, __int64);
200unsigned char _bittestandreset64(__int64 *, __int64);
201unsigned char _bittestandset64(__int64 *, __int64);
202long _InterlockedAnd_np(long volatile *_Value, long _Mask);
203short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
204__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
205char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
206unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
207unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
208long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
209 long _Comparand);
210unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
211 __int64 _ExchangeHigh,
212 __int64 _ExchangeLow,
213 __int64 *_ComparandResult);
214short _InterlockedCompareExchange16_np(short volatile *_Destination,
215 short _Exchange, short _Comparand);
216__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
217 __int64 _Exchange, __int64 _Comparand);
218void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
219 void *_Exchange, void *_Comparand);
220long _InterlockedOr_np(long volatile *_Value, long _Mask);
221short _InterlockedOr16_np(short volatile *_Value, short _Mask);
222__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
223char _InterlockedOr8_np(char volatile *_Value, char _Mask);
224long _InterlockedXor_np(long volatile *_Value, long _Mask);
225short _InterlockedXor16_np(short volatile *_Value, short _Mask);
226__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
227char _InterlockedXor8_np(char volatile *_Value, char _Mask);
228unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
229__int64 _sarx_i64(__int64, unsigned int);
230unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
231unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
232__int64 __mulh(__int64, __int64);
233unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
234__int64 _mul128(__int64, __int64, __int64 *);
235
236#endif /* __x86_64__ */
237
238/*----------------------------------------------------------------------------*\
239|* movs, stos
240\*----------------------------------------------------------------------------*/
241
242#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
/* Copy __n bytes from __src to __dst with `rep movsb` (MSVC __movsb).
 * Regions must not overlap. All three operands are read-write ("+") so the
 * compiler knows RDI/RSI/RCX are clobbered by the string operation. */
static __inline__ void __DEFAULT_FN_ATTRS __movsb(unsigned char *__dst,
                                                  unsigned char const *__src,
                                                  size_t __n) {
#if defined(__x86_64__)
  __asm__ __volatile__("rep movsb"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
#else
  /* On i386, ESI is not listed as a direct constraint; __src is swapped into
   * ESI around the string op instead — NOTE(review): presumably because ESI
   * may be reserved by the compiler (e.g. as a base/frame register) and thus
   * unavailable as an asm operand; confirm against the originating commit. */
  __asm__ __volatile__("xchg {%%esi, %1|%1, esi}\n"
                       "rep movsb\n"
                       "xchg {%%esi, %1|%1, esi}"
                       : "+D"(__dst), "+r"(__src), "+c"(__n)
                       :
                       : "memory");
#endif
}
/* Copy __n doublewords (32-bit units) from __src to __dst with `rep movsd`
 * (MSVC __movsd). The `{l|d}` alternative spells the mnemonic for AT&T vs.
 * Intel asm dialects. Regions must not overlap. */
static __inline__ void __DEFAULT_FN_ATTRS __movsd(unsigned long *__dst,
                                                  unsigned long const *__src,
                                                  size_t __n) {
#if defined(__x86_64__)
  __asm__ __volatile__("rep movs{l|d}"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
#else
  /* i386: __src is swapped into ESI around the string op rather than being
   * constrained to "S" directly — NOTE(review): presumably because ESI may be
   * reserved by the compiler on this target; confirm. */
  __asm__ __volatile__("xchg {%%esi, %1|%1, esi}\n"
                       "rep movs{l|d}\n"
                       "xchg {%%esi, %1|%1, esi}"
                       : "+D"(__dst), "+r"(__src), "+c"(__n)
                       :
                       : "memory");
#endif
}
/* Copy __n words (16-bit units) from __src to __dst with `rep movsw`
 * (MSVC __movsw). Regions must not overlap. */
static __inline__ void __DEFAULT_FN_ATTRS __movsw(unsigned short *__dst,
                                                  unsigned short const *__src,
                                                  size_t __n) {
#if defined(__x86_64__)
  __asm__ __volatile__("rep movsw"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
#else
  /* i386: swap __src into ESI around the string op instead of using the "S"
   * constraint — NOTE(review): presumably because ESI may be reserved by the
   * compiler on this target; confirm. */
  __asm__ __volatile__("xchg {%%esi, %1|%1, esi}\n"
                       "rep movsw\n"
                       "xchg {%%esi, %1|%1, esi}"
                       : "+D"(__dst), "+r"(__src), "+c"(__n)
                       :
                       : "memory");
#endif
}
/* Store the 32-bit value __x into each of the __n doublewords starting at
 * __dst, using `rep stosd` (MSVC __stosd). EAX holds the fill value. */
static __inline__ void __DEFAULT_FN_ATTRS __stosd(unsigned long *__dst,
                                                  unsigned long __x,
                                                  size_t __n) {
  __asm__ __volatile__("rep stos{l|d}"
                       : "+D"(__dst), "+c"(__n)
                       : "a"(__x)
                       : "memory");
}
/* Store the 16-bit value __x into each of the __n words starting at __dst,
 * using `rep stosw` (MSVC __stosw). AX holds the fill value. */
static __inline__ void __DEFAULT_FN_ATTRS __stosw(unsigned short *__dst,
                                                  unsigned short __x,
                                                  size_t __n) {
  __asm__ __volatile__("rep stosw"
                       : "+D"(__dst), "+c"(__n)
                       : "a"(__x)
                       : "memory");
}
310#endif
311#if defined(__x86_64__) && !defined(__arm64ec__)
/* Copy __n quadwords (64-bit units) from __src to __dst with `rep movsq`
 * (MSVC __movsq; x86-64 only). Regions must not overlap. */
static __inline__ void __DEFAULT_FN_ATTRS __movsq(
    unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsq"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
}
/* Store the 64-bit value __x into each of the __n quadwords starting at
 * __dst, using `rep stosq` (MSVC __stosq; x86-64 only). RAX holds the value. */
static __inline__ void __DEFAULT_FN_ATTRS __stosq(unsigned __int64 *__dst,
                                                  unsigned __int64 __x,
                                                  size_t __n) {
  __asm__ __volatile__("rep stosq" : "+D"(__dst), "+c"(__n) : "a"(__x)
                       : "memory");
}
325#endif
326
327/*----------------------------------------------------------------------------*\
328|* Misc
329\*----------------------------------------------------------------------------*/
330#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
/* Execute the HLT instruction (MSVC __halt), stopping the processor until
 * the next interrupt. Privileged; intended for kernel-mode code. */
static __inline__ void __DEFAULT_FN_ATTRS __halt(void) {
  __asm__ volatile("hlt");
}
334
335static __inline__ unsigned char __inbyte(unsigned short port) {
336 unsigned char ret;
337 __asm__ __volatile__("inb %w1, %b0" : "=a"(ret) : "Nd"(port));
338 return ret;
339}
340
341static __inline__ unsigned short __inword(unsigned short port) {
342 unsigned short ret;
343 __asm__ __volatile__("inw %w1, %w0" : "=a"(ret) : "Nd"(port));
344 return ret;
345}
346
347static __inline__ unsigned long __indword(unsigned short port) {
348 unsigned long ret;
349 __asm__ __volatile__("inl %w1, %k0" : "=a"(ret) : "Nd"(port));
350 return ret;
351}
352
353static __inline__ void __outbyte(unsigned short port, unsigned char data) {
354 __asm__ __volatile__("outb %b0, %w1" : : "a"(data), "Nd"(port));
355}
356
357static __inline__ void __outword(unsigned short port, unsigned short data) {
358 __asm__ __volatile__("outw %w0, %w1" : : "a"(data), "Nd"(port));
359}
360
361static __inline__ void __outdword(unsigned short port, unsigned long data) {
362 __asm__ __volatile__("outl %k0, %w1" : : "a"(data), "Nd"(port));
363}
364#endif
365
366#if defined(__i386__) || defined(__x86_64__) || defined(__aarch64__)
/* Emit a single NOP instruction (MSVC __nop). The asm is volatile so the
 * instruction is not optimized away. */
static __inline__ void __DEFAULT_FN_ATTRS __nop(void) {
  __asm__ volatile("nop");
}
370#endif
371
372/*----------------------------------------------------------------------------*\
373|* MS AArch64 specific
374\*----------------------------------------------------------------------------*/
375#if defined(__aarch64__) || defined(__arm64ec__)
376unsigned __int64 __getReg(int);
377unsigned char _interlockedbittestandreset_acq(long volatile *, long);
378unsigned char _interlockedbittestandreset_nf(long volatile *, long);
379unsigned char _interlockedbittestandreset_rel(long volatile *, long);
380unsigned char _interlockedbittestandreset64_acq(__int64 volatile *, __int64);
381unsigned char _interlockedbittestandreset64_nf(__int64 volatile *, __int64);
382unsigned char _interlockedbittestandreset64_rel(__int64 volatile *, __int64);
383unsigned char _interlockedbittestandset_acq(long volatile *, long);
384unsigned char _interlockedbittestandset_nf(long volatile *, long);
385unsigned char _interlockedbittestandset_rel(long volatile *, long);
386unsigned char _interlockedbittestandset64_acq(__int64 volatile *, __int64);
387unsigned char _interlockedbittestandset64_nf(__int64 volatile *, __int64);
388unsigned char _interlockedbittestandset64_rel(__int64 volatile *, __int64);
389long _InterlockedAdd(long volatile *, long);
390long _InterlockedAdd_acq(long volatile *, long);
391long _InterlockedAdd_nf(long volatile *, long);
392long _InterlockedAdd_rel(long volatile *, long);
393__int64 _InterlockedAdd64(__int64 volatile *, __int64);
394__int64 _InterlockedAdd64_acq(__int64 volatile *, __int64);
395__int64 _InterlockedAdd64_nf(__int64 volatile *, __int64);
396__int64 _InterlockedAdd64_rel(__int64 volatile *, __int64);
397__int64 _ReadStatusReg(int);
398void _WriteStatusReg(int, __int64);
399unsigned int __sys(int, __int64);
400
401unsigned short __cdecl _byteswap_ushort(unsigned short val);
402unsigned long __cdecl _byteswap_ulong (unsigned long val);
403unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64 val);
404
405__int64 __mulh(__int64 __a, __int64 __b);
406unsigned __int64 __umulh(unsigned __int64 __a, unsigned __int64 __b);
407
408void __break(int);
409
410void __writex18byte(unsigned long offset, unsigned char data);
411void __writex18word(unsigned long offset, unsigned short data);
412void __writex18dword(unsigned long offset, unsigned long data);
413void __writex18qword(unsigned long offset, unsigned __int64 data);
414
415unsigned char __readx18byte(unsigned long offset);
416unsigned short __readx18word(unsigned long offset);
417unsigned long __readx18dword(unsigned long offset);
418unsigned __int64 __readx18qword(unsigned long offset);
419
420void __addx18byte(unsigned long offset, unsigned char data);
421void __addx18word(unsigned long offset, unsigned short data);
422void __addx18dword(unsigned long offset, unsigned long data);
423void __addx18qword(unsigned long offset, unsigned __int64 data);
424
425void __incx18byte(unsigned long offset);
426void __incx18word(unsigned long offset);
427void __incx18dword(unsigned long offset);
428void __incx18qword(unsigned long offset);
429
430double _CopyDoubleFromInt64(__int64);
431float _CopyFloatFromInt32(__int32);
432__int32 _CopyInt32FromFloat(float);
433__int64 _CopyInt64FromDouble(double);
434
435unsigned int _CountLeadingOnes(unsigned long);
436unsigned int _CountLeadingOnes64(unsigned __int64);
437unsigned int _CountLeadingSigns(long);
438unsigned int _CountLeadingSigns64(__int64);
439unsigned int _CountOneBits(unsigned long);
440unsigned int _CountOneBits64(unsigned __int64);
441
442unsigned int __hlt(unsigned int, ...);
443
444void __cdecl __prefetch(const void *);
445
446#endif
447
448/*----------------------------------------------------------------------------*\
449|* Privileged intrinsics
450\*----------------------------------------------------------------------------*/
451#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
/* Read a 64-bit model-specific register (MSVC __readmsr). Privileged;
 * intended for kernel-mode code. */
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  // Recombine the EDX:EAX halves into the 64-bit result.
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
465
/* Read the CR3 register (page-table base) — MSVC __readcr3. Privileged.
 * Return type is pointer-width: __int64 on x86-64, long on i386 (see the
 * __LPTRINT_TYPE__ definition earlier in this file). The "memory" clobber
 * keeps memory accesses from being reordered across the read. */
static __inline__ unsigned __LPTRINT_TYPE__ __DEFAULT_FN_ATTRS __readcr3(void) {
  unsigned __LPTRINT_TYPE__ __cr3_val;
  __asm__ __volatile__(
      "mov {%%cr3, %0|%0, cr3}"
      : "=r"(__cr3_val)
      :
      : "memory");
  return __cr3_val;
}
475
/* Write the CR3 register (page-table base) — MSVC __writecr3. Privileged;
 * switching CR3 changes the active address space. The "memory" clobber
 * prevents memory accesses from being reordered across the write. */
static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned __INTPTR_TYPE__ __cr3_val) {
  __asm__ ("mov {%0, %%cr3|cr3, %0}" : : "r"(__cr3_val) : "memory");
}
480#endif
481
482#ifdef __cplusplus
483}
484#endif
485
486#undef __LPTRINT_TYPE__
487
488#undef __DEFAULT_FN_ATTRS
489
490#endif /* __INTRIN_H */
491#endif /* _MSC_VER */
#define __DEFAULT_FN_ATTRS
static __inline__ vector float vector float __b
Definition altivec.h:578
__asm__("swp %0, %1, [%2]" :"=r"(__v) :"r"(__x), "r"(__p) :"memory")
#define __cpuid(__leaf, __eax, __ebx, __ecx, __edx)
Definition cpuid.h:270
static __inline void __cpuidex(int __cpu_info[4], int __leaf, int __subleaf)
Definition cpuid.h:352
static __inline__ void int __a
Definition emmintrin.h:4077
#define __break(_1)
static __inline__ void const void * __src