#include_next <intrin.h>

#if defined(__i386__) || defined(__x86_64__)

#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

__m64 _m_from_float(float);
float _m_to_float(__m64);
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
void __cpuidex(int[4], int, int);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
unsigned int __lzcnt(unsigned int);
void __movsb(unsigned char *, unsigned char const *, size_t);
void __movsd(unsigned long *, unsigned long const *, size_t);
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
unsigned char __readfsbyte(unsigned long);
unsigned __int64 __readfsqword(unsigned long);
unsigned short __readfsword(unsigned long);
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __stosb(unsigned char *, unsigned char, size_t);
void __stosd(unsigned long *, unsigned long, size_t);
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __writecr0(unsigned int);
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
void *_AddressOfReturnAddress(void);
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
unsigned char _bittest(long const *, long);
unsigned char _bittestandcomplement(long *, long);
unsigned char _bittestandreset(long *, long);
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
int __cdecl _setjmp(jmp_buf);
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned __int64 __lzcnt64(unsigned __int64);
void __movsq(unsigned long long *, unsigned long long const *, size_t);
unsigned char __readgsbyte(unsigned long);
unsigned long __readgsdword(unsigned long);
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
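/* Illustrative usage sketch, not part of the original header: __shiftleft128
 * treats _HighPart:_LowPart as a single 128-bit value, shifts it left by
 * _Shift bits, and returns the high 64 bits of the result. The function name
 * __example_high_bits below is hypothetical. */
#if 0
static unsigned __int64 __example_high_bits(unsigned __int64 __lo,
                                            unsigned __int64 __hi) {
  /* Shift the 128-bit value __hi:__lo left by 3 and keep the new high half. */
  return __shiftleft128(__lo, __hi, 3);
}
#endif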
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _bittest64(__int64 const *, __int64);
unsigned char _bittestandcomplement64(__int64 *, __int64);
unsigned char _bittestandreset64(__int64 *, __int64);
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
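/* Illustrative usage sketch, not part of the original header: a 128-bit
 * compare-and-swap through _InterlockedCompareExchange128. The intrinsic
 * returns 1 and performs the swap when *_Destination equals the comparand;
 * otherwise it returns 0 and writes the current destination value back
 * through _ComparandResult. The names and the low/high comparand layout shown
 * below are assumptions made only for this example. */
#if 0
static int __example_cas128(__int64 volatile *__dst, /* 16-byte aligned */
                            __int64 __new_hi, __int64 __new_lo,
                            __int64 __expected[2]) {
  /* On failure, __expected is updated with the value currently in __dst. */
  return _InterlockedCompareExchange128(__dst, __new_hi, __new_lo, __expected);
}
#endif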
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
__int64 __mulh(__int64, __int64);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
__int64 _mul128(__int64, __int64, __int64 *);
unsigned __int64 _umul128(unsigned __int64, unsigned __int64,
                          unsigned __int64 *);
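/* Illustrative usage sketch, not part of the original header: _umul128 returns
 * the low 64 bits of the full 128-bit product and stores the high 64 bits
 * through its third argument; __umulh returns only the high half. The name
 * __example_mul_wide is hypothetical. */
#if 0
static unsigned __int64 __example_mul_wide(unsigned __int64 __a,
                                           unsigned __int64 __b,
                                           unsigned __int64 *__hi) {
  unsigned __int64 __lo = _umul128(__a, __b, __hi); /* *__hi == __umulh(__a, __b) */
  return __lo;
}
#endif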
#if defined(__x86_64__) || defined(__arm__)

__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
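/* Illustrative usage sketch, not part of the original header: the _bittest*
 * family returns the prior value of the selected bit, so _bittestandset acts
 * as a simple (non-atomic) test-and-set; the _interlocked* variants below are
 * the atomic forms. The name __example_claim_slot is hypothetical. */
#if 0
static int __example_claim_slot(long *__bitmap, long __slot) {
  /* Returns 1 if the slot was already claimed, 0 if this call claimed it. */
  return _bittestandset(__bitmap, __slot);
}
#endif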
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
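/* Illustrative usage sketch, not part of the original header: a minimal
 * try-lock built on the acquire-ordered compare-exchange and release-ordered
 * exchange defined above. The names __example_try_lock and __example_unlock
 * are hypothetical. */
#if 0
static int __example_try_lock(long volatile *__lock) {
  /* Returns nonzero when the lock was free and is now held by the caller. */
  return _InterlockedCompareExchange_acq(__lock, 1, 0) == 0;
}
static void __example_unlock(long volatile *__lock) {
  _InterlockedExchange_rel(__lock, 0);
}
#endif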
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src,
        size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n));
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__("cpuid"
          : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__("cpuid"
          : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__("xgetbv" : "=a"(__eax), "=d"(__edx) : "c"(__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
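/* Illustrative usage sketch, not part of the original header: reading XCR0 via
 * _xgetbv to check whether the OS has enabled XMM and YMM state (bits 1 and
 * 2). The name __example_os_saves_avx_state is hypothetical. */
#if 0
static int __example_os_saves_avx_state(void) {
  unsigned __int64 __xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
  return (__xcr0 & 0x6) == 0x6; /* bit 1 = SSE state, bit 2 = AVX state */
}
#endif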
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile("nop");
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  unsigned long __edx;
  unsigned long __eax;
  __asm__("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}
static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#undef __DEFAULT_FN_ATTRS