/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the Windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

#if defined(__arm__)
#include <armintr.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
static __inline__
__int64 __emul(int, int);
static __inline__
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
static __inline__
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
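
/* Usage sketch (illustrative only, not part of this header): a common use of
 * _xgetbv is to ask whether the OS has enabled SSE and AVX register state in
 * XCR0 before taking an AVX code path. The helper name is hypothetical, and
 * it assumes CPUID.1:ECX.OSXSAVE has already been verified. */
#if 0
static int __os_enables_avx_state(void) {
  unsigned __int64 __xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
  return (__xcr0 & 0x6) == 0x6; /* bit 1: XMM state, bit 2: YMM state */
}
#endif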

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
static __inline__
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
static __inline__
__int64 _mul128(__int64, __int64, __int64*);
static __inline__
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64*);

#endif /* __x86_64__ */
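
/* Usage sketch (illustrative only): __shiftleft128 returns the high 64 bits
 * of the 128-bit value (_HighPart:_LowPart) << _Shift, so a full 128-bit left
 * shift by 0 < n < 64 can be composed as below on x86_64. The helper name is
 * hypothetical. */
#if 0
static void __shl128(unsigned __int64 *__hi, unsigned __int64 *__lo,
                     unsigned char __n) {
  *__hi = __shiftleft128(*__lo, *__hi, __n); /* low bits shift into high word */
  *__lo <<= __n;
}
#endif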

#if defined(__x86_64__) || defined(__arm__)

static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif

/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
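
/* Usage sketch (illustrative only): _bittestandset reports the bit's prior
 * state while setting it, which yields a simple first-fit slot allocator over
 * a 32-bit map. This form is not atomic; shared state needs the
 * _interlockedbittestandset family above. The helper is hypothetical. */
#if 0
static int __alloc_slot(long *__bitmap) { /* returns slot index, -1 if full */
  long __i;
  for (__i = 0; __i < 32; ++__i)
    if (!_bittestandset(__bitmap, __i)) /* bit was 0, so we just claimed it */
      return (int)__i;
  return -1;
}
#endif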
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
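
/* Usage sketch (illustrative only): the exchange-add family returns the value
 * held *before* the addition. On ARM targets, a statistics counter that needs
 * no ordering can use the "_nf" (no fence) variant. The counter name is
 * hypothetical. */
#if 0
static __int64 volatile __bytes_sent;
static void __count_bytes(__int64 __n) {
  _InterlockedExchangeAdd64_nf(&__bytes_sent, __n);
}
#endif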
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
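
/* Usage sketch (illustrative only): a reference count commonly pairs a relaxed
 * increment with a release decrement; a real implementation would also issue
 * an acquire fence on the final-release path, omitted here. The type and
 * helper names are hypothetical. */
#if 0
typedef struct { long volatile __refs; } __ref_counted;
static void __retain(__ref_counted *__o) {
  _InterlockedIncrement_nf(&__o->__refs); /* returns the new count */
}
static int __release(__ref_counted *__o) { /* nonzero on last release */
  return _InterlockedDecrement_rel(&__o->__refs) == 0;
}
#endif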
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
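
/* Usage sketch (illustrative only): the and/or/xor families implement atomic
 * flag words; each returns the value the word held before the operation. The
 * flag word and bit assignments are hypothetical. */
#if 0
static long volatile __flags;
static int __set_ready(void) { /* set bit 0, report whether it was already set */
  return (_InterlockedOr_acq(&__flags, 0x1) & 0x1) != 0;
}
static void __clear_busy(void) { /* drop bit 1 with release ordering */
  _InterlockedAnd_rel(&__flags, ~0x2L);
}
#endif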
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
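
/* Usage sketch (illustrative only): because _InterlockedExchange_acq returns
 * the previous value, it gives the classic test-and-set spinlock; a production
 * lock would add a yield or wait hint in the spin. The helper names are
 * hypothetical. */
#if 0
static void __spin_lock(long volatile *__lock) {
  while (_InterlockedExchange_acq(__lock, 1) != 0)
    ; /* spin until we are the thread that flips 0 -> 1 */
}
static void __spin_unlock(long volatile *__lock) {
  _InterlockedExchange_rel(__lock, 0);
}
#endif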
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  /* The failure ordering of a compare-exchange may not be __ATOMIC_RELEASE,
     so the "_rel" variants use relaxed ordering on the failure path. */
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
#endif
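
/* Usage sketch (illustrative only): compare-exchange returns the value that
 * was actually observed in *_Destination, so an update loop retries until its
 * snapshot is still current. The atomic-maximum helper below is hypothetical. */
#if 0
static void __atomic_store_max(long volatile *__dst, long __val) {
  long __old = *__dst;
  while (__old < __val) {
    long __seen = _InterlockedCompareExchange_acq(__dst, __val, __old);
    if (__seen == __old)
      break;        /* our snapshot was current; __val is now stored */
    __old = __seen; /* lost the race; retry against the newer value */
  }
}
#endif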

/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
/* "rep movs"/"rep stos" update the pointer and count registers and write to
   memory, so the operands are marked read-write and memory is clobbered. */
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__ __volatile__("rep movsb" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsl" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__ __volatile__("rep movsw" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__ __volatile__("rep stosl" : "+D"(__dst), "+c"(__n) : "a"(__x)
                       : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__ __volatile__("rep stosw" : "+D"(__dst), "+c"(__n) : "a"(__x)
                       : "memory");
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src,
        size_t __n) {
  __asm__ __volatile__("rep movsq" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__ __volatile__("rep stosq" : "+D"(__dst), "+c"(__n) : "a"(__x)
                       : "memory");
}
#endif

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__("cpuid"
          : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__("cpuid"
          : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__("xgetbv" : "=a"(__eax), "=d"(__edx) : "c"(__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile("nop");
}
#endif
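
/* Usage sketch (illustrative only): leaf 0 of __cpuid returns the highest
 * supported standard leaf in __info[0] and the 12-byte vendor string in
 * EBX, EDX, ECX. The helper and its buffer layout are hypothetical. */
#if 0
static void __cpu_vendor(char __vendor[13]) {
  int __info[4];
  __cpuid(__info, 0);
  __builtin_memcpy(__vendor + 0, &__info[1], 4); /* EBX */
  __builtin_memcpy(__vendor + 4, &__info[3], 4); /* EDX */
  __builtin_memcpy(__vendor + 8, &__info[2], 4); /* ECX */
  __vendor[12] = '\0';
}
#endif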

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */