/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

#if defined(__arm__)
#include <armintr.h>
#endif

#if defined(_M_ARM64)
#include <arm64intr.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
static __inline__
__int64 __emul(int, int);
static __inline__
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
static __inline__
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
static __inline__
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
static __inline__
__int64 _mul128(__int64, __int64, __int64*);
static __inline__
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64*);

#endif /* __x86_64__ */
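
/* Usage sketch (illustrative, not part of this header): recover the full
 * 128-bit product of two 64-bit values. _umul128 returns the low 64 bits and
 * stores the high 64 bits through its third argument; __umulh returns just
 * the high half.
 *
 *   unsigned __int64 a = 0x123456789ABCDEF0ull, b = 3, hi;
 *   unsigned __int64 lo = _umul128(a, b, &hi);
 *   // hi == __umulh(a, b)
 */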

#if defined(__x86_64__) || defined(__arm__)

static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif

/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
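/* Usage sketch (illustrative, not part of this header): these forms are
 * plain, non-atomic read-modify-write helpers; each returns the bit's value
 * as it was before the update.
 *
 *   long flags = 0;
 *   _bittestandset(&flags, 3);        // returns 0, flags is now 8
 *   if (_bittest(&flags, 3)) {        // true
 *     _bittestandreset(&flags, 3);    // returns 1 and clears the bit
 *   }
 */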
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
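/* Usage sketch (illustrative, not part of this header): the _acq/_nf/_rel
 * suffixes select acquire, relaxed ("no fence"), and release ordering. Each
 * variant atomically adds _Value and returns the value the addend held
 * before the addition.
 *
 *   long counter = 0;
 *   long old = _InterlockedExchangeAdd_acq(&counter, 5);  // old == 0
 */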
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
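/* Usage sketch (illustrative, not part of this header): unlike the
 * exchange-add family above, the increment family returns the *new* value
 * (__atomic_add_fetch rather than __atomic_fetch_add).
 *
 *   long refs = 0;
 *   long now = _InterlockedIncrement_acq(&refs);  // now == 1
 */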
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
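/* Usage sketch (illustrative, not part of this header): the fetch-and family
 * returns the previous value, so a caller can both apply a mask and learn
 * which bits were set beforehand.
 *
 *   char state = 0x07;
 *   char prev = _InterlockedAnd8_rel(&state, 0x03);  // prev == 0x07, state == 0x03
 */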
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
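/* Usage sketch (illustrative, not part of this header): a minimal
 * test-and-set spinlock built from the exchange intrinsics, acquiring on
 * lock and releasing on unlock.
 *
 *   long lock = 0;
 *   while (_InterlockedExchange_acq(&lock, 1))
 *     ;                                 // spin until the old value was 0
 *   // ...critical section...
 *   _InterlockedExchange_rel(&lock, 0); // publish writes, then release
 */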
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  /* A release failure order is invalid for __atomic_compare_exchange, so the
     failure case falls back to relaxed. */
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
#endif
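/* Usage sketch (illustrative, not part of this header): each
 * compare-exchange returns the value observed at _Destination; the swap
 * succeeded iff that equals _Comparand. A typical retry loop:
 *
 *   long value = 10;
 *   long cur = value;
 *   for (;;) {
 *     long seen = _InterlockedCompareExchange_acq(&value, cur * 2, cur);
 *     if (seen == cur)
 *       break;     // swap succeeded
 *     cur = seen;  // lost a race; retry against the fresh value
 *   }
 */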

/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  /* Mark the pointer and count registers as read-write and clobber memory so
     the compiler cannot cache values across the string operation. */
  __asm__ __volatile__("rep movsb" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsl" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__ __volatile__("rep movsw" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__ __volatile__("rep stosl" : "+D"(__dst), "+c"(__n)
                       : "a"(__x) : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__ __volatile__("rep stosw" : "+D"(__dst), "+c"(__n)
                       : "a"(__x) : "memory");
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsq" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__ __volatile__("rep stosq" : "+D"(__dst), "+c"(__n)
                       : "a"(__x) : "memory");
}
#endif
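/* Usage sketch (illustrative, not part of this header): these wrap the x86
 * "rep movs"/"rep stos" string instructions. Counts are in elements, not
 * bytes (__movsd moves 32-bit longs).
 *
 *   unsigned long src[4] = {1, 2, 3, 4}, dst[4];
 *   __movsd(dst, src, 4);  // copy four elements
 *   __stosd(dst, 0, 4);    // then zero them again
 */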

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  /* MSVC's __cpuid zeroes ECX before executing CPUID; without this,
     subleaf-sensitive leaves (e.g. 4 and 7) read whatever ECX happens to
     hold. */
  __asm__("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level), "c"(0));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level), "c"(__ecx));
}
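/* Usage sketch (illustrative, not part of this header): read the CPU vendor
 * string from leaf 0. The twelve bytes come back in EBX, EDX, ECX, in that
 * order (the sketch assumes <string.h> for memcpy).
 *
 *   int info[4];
 *   char vendor[13] = {0};
 *   __cpuid(info, 0);
 *   memcpy(vendor + 0, &info[1], 4);  // EBX
 *   memcpy(vendor + 4, &info[3], 4);  // EDX
 *   memcpy(vendor + 8, &info[2], 4);  // ECX
 *   // vendor now holds e.g. "GenuineIntel"
 */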
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__("xgetbv" : "=a"(__eax), "=d"(__edx) : "c"(__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
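/* Usage sketch (illustrative, not part of this header): query XCR0 to check
 * that the OS manages both XMM (bit 1) and YMM (bit 2) state, a prerequisite
 * for safely using AVX.
 *
 *   unsigned __int64 xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
 *   int avx_usable = (xcr0 & 0x6) == 0x6;
 */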
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile ("nop");
}
#endif

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
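
/* Usage sketch (illustrative, not part of this header): rdmsr is a
 * privileged instruction, so __readmsr only works at ring 0, e.g. inside a
 * driver. Reading IA32_APIC_BASE (MSR 0x1B) and extracting its page-aligned
 * base address field:
 *
 *   unsigned __int64 apic_base = __readmsr(0x1B);
 *   unsigned __int64 apic_phys = apic_base & ~0xFFFull;
 */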

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */