sanitizer_atomic_msvc.h revision b1d1ef27db5b6c7788cda3dfd5deb6037dd4a3b8
//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_MSVC_H
#define SANITIZER_ATOMIC_MSVC_H

extern "C" void _ReadWriteBarrier();
#pragma intrinsic(_ReadWriteBarrier)
extern "C" void _mm_mfence();
#pragma intrinsic(_mm_mfence)
extern "C" void _mm_pause();
#pragma intrinsic(_mm_pause)
extern "C" long _InterlockedExchangeAdd(  // NOLINT
    long volatile * Addend, long Value);  // NOLINT
#pragma intrinsic(_InterlockedExchangeAdd)

#ifdef _WIN64
extern "C" void *_InterlockedCompareExchangePointer(
    void *volatile *Destination,
    void *Exchange, void *Comparand);
#pragma intrinsic(_InterlockedCompareExchangePointer)
#else
// There's no _InterlockedCompareExchangePointer intrinsic on x86,
// so call _InterlockedCompareExchange instead.
extern "C"
long __cdecl _InterlockedCompareExchange(  // NOLINT
    long volatile *Destination,            // NOLINT
    long Exchange, long Comparand);        // NOLINT
#pragma intrinsic(_InterlockedCompareExchange)

inline static void *_InterlockedCompareExchangePointer(
    void *volatile *Destination,
    void *Exchange, void *Comparand) {
  return reinterpret_cast<void*>(
      _InterlockedCompareExchange(
          reinterpret_cast<long volatile*>(Destination),  // NOLINT
          reinterpret_cast<long>(Exchange),   // NOLINT
          reinterpret_cast<long>(Comparand)));  // NOLINT
}
#endif

namespace __sanitizer {

INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}

INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}

INLINE void proc_yield(int cnt) {
  for (int i = 0; i < cnt; i++)
    _mm_pause();
}

// On x86, aligned loads have acquire semantics and aligned stores have
// release semantics in hardware, so only compiler reordering needs to be
// suppressed; atomic_signal_fence() lowers to _ReadWriteBarrier(), a
// compiler-only fence.
template<typename T>
INLINE typename T::Type atomic_load(
    const volatile T *a, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_consume
      | memory_order_acquire | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  typename T::Type v;
  if (mo == memory_order_relaxed) {
    v = a->val_dont_use;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    v = a->val_dont_use;
    atomic_signal_fence(memory_order_seq_cst);
  }
  return v;
}

template<typename T>
INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_release
      | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  if (mo == memory_order_relaxed) {
    a->val_dont_use = v;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    a->val_dont_use = v;
    atomic_signal_fence(memory_order_seq_cst);
  }
  // A seq_cst store additionally needs a full hardware fence to order it
  // against later loads.
  if (mo == memory_order_seq_cst)
    atomic_thread_fence(memory_order_seq_cst);
}

INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, (long)v);  // NOLINT
}
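// Note: an x86 xchg with a memory operand carries an implicit "lock" prefix,
// so the inline-assembly exchanges below are full barriers regardless of the
// memory_order argument (which is why it can safely be ignored). MSVC's
// inline assembler is only available when targeting x86, so these helpers
// assume a 32-bit build.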
INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
    u8 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cl, v
    xchg [eax], cl  // NOLINT
    mov v, cl
  }
  return v;
}

INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
    u16 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cx, v
    xchg [eax], cx  // NOLINT
    mov v, cx
  }
  return v;
}

INLINE bool atomic_compare_exchange_strong(volatile atomic_uintptr_t *a,
                                           uptr *cmp,
                                           uptr xchg,
                                           memory_order mo) {
  uptr cmpv = *cmp;
  uptr prev = (uptr)_InterlockedCompareExchangePointer(
      (void*volatile*)&a->val_dont_use, (void*)xchg, (void*)cmpv);
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

// A strong CAS is also a valid weak CAS (it is simply never allowed to fail
// spuriously), so the weak variant just forwards to the strong one.
template<typename T>
INLINE bool atomic_compare_exchange_weak(volatile T *a,
                                         typename T::Type *cmp,
                                         typename T::Type xchg,
                                         memory_order mo) {
  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
}

}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_MSVC_H
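// Usage sketch (illustrative only; not part of this header): a minimal spin
// lock built from these primitives, in the style of the sanitizer runtime.
// The SpinLock name is hypothetical; the types and constants used (INLINE,
// u8, atomic_uint8_t, memory_order_*) are the ones sanitizer_atomic.h
// provides, as elsewhere in this file.
//
//   struct SpinLock {
//     atomic_uint8_t state_;  // 0 - unlocked, 1 - locked
//     void Lock() {
//       // atomic_exchange returns the previous value: 0 means the lock
//       // was free and the caller now owns it.
//       while (atomic_exchange(&state_, 1, memory_order_acquire) != 0)
//         proc_yield(10);  // back off with pause instructions while spinning
//     }
//     void Unlock() {
//       atomic_store(&state_, 0, memory_order_release);
//     }
//   };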