Lines Matching refs:__int64

65 __int64 __emul(int, int);
66 unsigned __int64 __emulu(unsigned int, unsigned int);
83 unsigned __int64 __ll_lshift(unsigned __int64, int);
84 __int64 __ll_rshift(__int64, int);
122 unsigned __int64 __readfsqword(unsigned long);
127 unsigned __int64 __readmsr(unsigned long);
128 unsigned __int64 __readpmc(unsigned long);
146 unsigned __int64 __ull_rshift(unsigned __int64, int);
148 void __vmx_vmptrst(unsigned __int64 *);
158 void __writefsqword(unsigned long, unsigned __int64);
160 void __writemsr(unsigned long, unsigned __int64);
175 unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
182 long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
201 __int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
202 __int64 _Exchange, __int64 _Comparand);
203 __int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
204 __int64);
205 __int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
206 __int64);
229 __int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
230 __int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
267 unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
275 unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
285 void _Store64_HLERelease(__int64 volatile *, __int64);
293 unsigned __int64 __cdecl _xgetbv(unsigned int);
294 void __cdecl _xrstor(void const *, unsigned __int64);
295 void __cdecl _xsave(void *, unsigned __int64);
296 void __cdecl _xsaveopt(void *, unsigned __int64);
297 void __cdecl _xsetbv(unsigned int, unsigned __int64);
303 void __addgsqword(unsigned long, unsigned __int64);
311 unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
312 void __lwpval64(unsigned __int64, unsigned int, unsigned int);
313 unsigned __int64 __lzcnt64(unsigned __int64);
316 __int64 __mulh(__int64, __int64);
318 unsigned __int64 __popcnt64(unsigned __int64);
324 unsigned __int64 __readgsqword(unsigned long);
326 unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
327 unsigned __int64 _HighPart,
329 unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
330 unsigned __int64 _HighPart,
333 void __stosq(unsigned __int64 *, unsigned __int64, size_t);
334 unsigned char __vmx_on(unsigned __int64 *);
335 unsigned char __vmx_vmclear(unsigned __int64 *);
337 unsigned char __vmx_vmptrld(unsigned __int64 *);
343 void __writegsqword(unsigned long, unsigned __int64);
346 unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
348 unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
350 unsigned char _bittest64(__int64 const *, __int64);
352 unsigned char _bittestandcomplement64(__int64 *, __int64);
354 unsigned char _bittestandreset64(__int64 *, __int64);
356 unsigned char _bittestandset64(__int64 *, __int64);
357 unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
362 __int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
364 unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
366 unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
369 unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
370 __int64 _ExchangeHigh,
371 __int64 _ExchangeLow,
372 __int64 *_ComparandResult);
373 unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
374 __int64 _ExchangeHigh,
375 __int64 _ExchangeLow,
376 __int64 *_ComparandResult);
379 __int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
380 __int64);
381 __int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
382 __int64);
383 __int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
384 __int64 _Exchange, __int64 _Comparand);
390 __int64 _InterlockedDecrement64(__int64 volatile *_Addend);
392 __int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
394 __int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
397 __int64 _InterlockedIncrement64(__int64 volatile *_Addend);
401 __int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
402 __int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
407 __int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
408 __int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
411 __int64 _mul128(__int64 _Multiplier, __int64 _Multiplicand,
412 __int64 *_HighProduct);
413 unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
414 __int64 _sarx_i64(__int64, unsigned int);
418 unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
419 unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
424 static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
425 _umul128(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand,
426 unsigned __int64 *_HighProduct) {
432 static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
433 __umulh(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand) {
438 void __cdecl _xrstor64(void const *, unsigned __int64);
439 void __cdecl _xsave64(void *, unsigned __int64);
440 void __cdecl _xsaveopt64(void *, unsigned __int64);
488 __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
489 _rotl64(unsigned __int64 _Value, int _Shift) {
494 __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
495 _rotr64(unsigned __int64 _Value, int _Shift) {
560 _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
567 _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
574 unsigned __int64 __attribute__((__always_inline__, __nodebug__))
575 __popcnt64(unsigned __int64 value) {
579 _bittest64(__int64 const *a, __int64 b) {
583 _bittestandcomplement64(__int64 *a, __int64 b) {
589 _bittestandreset64(__int64 *a, __int64 b) {
595 _bittestandset64(__int64 *a, __int64 b) {
601 _interlockedbittestandset64(__int64 volatile *__BitBase, __int64 __BitPos) {
623 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
624 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
644 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
645 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
657 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
658 _InterlockedIncrement64(__int64 volatile *_Value) {
670 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
671 _InterlockedDecrement64(__int64 volatile *_Value) {
691 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
692 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
712 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
713 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
733 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
734 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
752 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
753 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
773 static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
774 _InterlockedCompareExchange64(__int64 volatile *_Destination,
775 __int64 _Exchange, __int64 _Comparand) {
819 static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
821 return *__ptr_to_addr_space(257, unsigned __int64, __offset);
837 static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
839 return *__ptr_to_addr_space(256, unsigned __int64, __offset);
889 __stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
917 static __inline__ unsigned __int64 __cdecl __attribute__((__always_inline__, __nodebug__))
921 return ((unsigned __int64)__edx << 32) | __eax;
933 static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
944 return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;