asan_fake_stack.cc revision c519335c2d6d32acaac32c0595f08a05081567e7
//===-- asan_fake_stack.cc ------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//
#include "asan_allocator.h"
#include "asan_poisoning.h"
#include "asan_thread.h"

namespace __asan {

static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;

// For small size classes inline PoisonShadow for better performance.
ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  CHECK_EQ(SHADOW_SCALE, 3);  // This code expects SHADOW_SCALE=3.
  u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
  if (class_id <= 6) {
    for (uptr i = 0; i < (1U << class_id); i++)
      shadow[i] = magic;
  } else {
    // The size class is too big, it's cheaper to poison only size bytes.
    PoisonShadow(ptr, size, static_cast<u8>(magic));
  }
}
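// A worked example of the loop bound above (a sketch; it assumes
// kMinStackFrameSizeLog == 6, i.e. 64-byte minimal frames, as defined in
// asan_fake_stack.h): a frame of class_id k occupies 1 << (k + 6) bytes,
// so with SHADOW_SCALE == 3 its shadow is 1 << (k + 3) bytes, i.e. exactly
// the 1 << k u64 stores performed by the loop:
//   class_id 0:   64-byte frame ->   8 shadow bytes ->  1 u64 store
//   class_id 6: 4096-byte frame -> 512 shadow bytes -> 64 u64 stores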
FakeStack *FakeStack::Create(uptr stack_size_log) {
  static uptr kMinStackSizeLog = 16;
  static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
  if (stack_size_log < kMinStackSizeLog)
    stack_size_log = kMinStackSizeLog;
  if (stack_size_log > kMaxStackSizeLog)
    stack_size_log = kMaxStackSizeLog;
  FakeStack *res = reinterpret_cast<FakeStack *>(
      MmapOrDie(RequiredSize(stack_size_log), "FakeStack"));
  res->stack_size_log_ = stack_size_log;
  if (flags()->verbosity) {
    u8 *p = reinterpret_cast<u8 *>(res);
    Report("T%d: FakeStack created: %p -- %p stack_size_log: %zd \n",
           GetCurrentTidOrInvalid(), p,
           p + FakeStack::RequiredSize(stack_size_log), stack_size_log);
  }
  return res;
}

void FakeStack::Destroy() {
  PoisonAll(0);
  UnmapOrDie(this, RequiredSize(stack_size_log_));
}

void FakeStack::PoisonAll(u8 magic) {
  PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
               magic);
}

ALWAYS_INLINE USED
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with a regular non-atomic load and store (at least I was not able to
    // make this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return 0;  // We are out of fake stack.
}

// Returns the start of the fake frame that contains ptr, or 0 if ptr does
// not belong to this fake stack.
uptr FakeStack::AddrIsInFakeStack(uptr ptr) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (1UL << stack_size_log));
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  return base + pos * BytesInSizeClass(class_id);
}

void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}

// When a throw, longjmp or some such event happens, we don't call OnFree()
// and as a result may leak one or more fake frames; the good news is that
// we are notified about all such events by HandleNoReturn().
// If we recently had such a no-return event, we need to collect the garbage
// frames. We do it based on their 'real_stack' values -- everything that is
// lower than the current real_stack is garbage.
NOINLINE void FakeStack::GC(uptr real_stack) {
  uptr collected = 0;
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      if (ff->real_stack < real_stack) {
        flags[i] = 0;
        collected++;
      }
    }
  }
  needs_gc_ = false;
}
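// A worked example of the collection rule (addresses are illustrative):
// suppose Allocate() handed out one frame while the real stack pointer was
// at 0x7f0000 and another, in a deeper call, at 0x7e0000 (the stack grows
// down). If a longjmp abandons the deeper call and the next Allocate() runs
// with real_stack == 0x7e8000, the frame tagged 0x7e0000 satisfies
// ff->real_stack < real_stack and is collected, while the frame tagged
// 0x7f0000 belongs to a caller that is still live and survives.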
void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      uptr begin = reinterpret_cast<uptr>(ff);
      callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
    }
  }
}

#if SANITIZER_LINUX && !SANITIZER_ANDROID
static THREADLOCAL FakeStack *fake_stack_tls;

FakeStack *GetTLSFakeStack() {
  return fake_stack_tls;
}
void SetTLSFakeStack(FakeStack *fs) {
  fake_stack_tls = fs;
}
#else
FakeStack *GetTLSFakeStack() { return 0; }
void SetTLSFakeStack(FakeStack *fs) { }
#endif  // SANITIZER_LINUX && !SANITIZER_ANDROID

static FakeStack *GetFakeStack() {
  AsanThread *t = GetCurrentThread();
  if (!t) return 0;
  return t->fake_stack();
}

static FakeStack *GetFakeStackFast() {
  if (FakeStack *fs = GetTLSFakeStack())
    return fs;
  if (!__asan_option_detect_stack_use_after_return)
    return 0;
  return GetFakeStack();
}

ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size, uptr real_stack) {
  FakeStack *fs = GetFakeStackFast();
  if (!fs) return real_stack;
  FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack);
  if (!ff)
    return real_stack;  // Out of fake stack, return the real one.
  uptr ptr = reinterpret_cast<uptr>(ff);
  SetShadow(ptr, size, class_id, 0);
  return ptr;
}

ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size, uptr real_stack) {
  if (ptr == real_stack)
    return;
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}

}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                       \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
      __asan_stack_malloc_##class_id(uptr size, uptr real_stack) {             \
    return __asan::OnMalloc(class_id, size, real_stack);                       \
  }                                                                            \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id(  \
      uptr ptr, uptr size, uptr real_stack) {                                  \
    __asan::OnFree(ptr, class_id, size, real_stack);                           \
  }

DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
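// A minimal usage sketch (not part of this revision; the local names are
// hypothetical) of how compiler-instrumented code is expected to use the
// interface above for a function whose frame fits in size class 0:
//
//   uptr real = reinterpret_cast<uptr>(&frame_buf);  // 64-byte stack buffer
//   uptr frame = __asan_stack_malloc_0(64, real);    // may return `real`
//   ...  // the function body addresses its locals via `frame`
//   __asan_stack_free_0(frame, 64, real);            // no-op if frame == real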