asan_fake_stack.cc revision b1173c27c2791aef27304e68911a11648401064d
//===-- asan_fake_stack.cc ------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//
#include "asan_allocator.h"
#include "asan_poisoning.h"
#include "asan_thread.h"

namespace __asan {

// Poison the shadow of the entire FakeStack region (all size classes) with
// the given magic value.
void FakeStack::PoisonAll(u8 magic) {
  PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
               magic);
}

// Claim one free fake frame from the given size class and stamp it with the
// caller's real stack pointer. CHECK-fails if every frame of the class is
// already taken.
ALWAYS_INLINE USED
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  // A recent no-return event (longjmp/throw, see HandleNoReturn) may have
  // leaked frames; collect them first so the scan below can find a free slot.
  if (needs_gc_)
    GC(real_stack);
  // Rotating hint: resume the search where the previous allocation for this
  // class stopped, instead of rescanning from slot 0 every time.
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    if (flags[pos]) continue;  // Cheap non-atomic pre-check: slot is taken.
    // FIXME: this does not have to be thread-safe, just async-signal-safe.
    // Atomically flip the flag 0 -> 1: only the winner of the exchange owns
    // the frame; a loser keeps scanning.
    if (0 == atomic_exchange((atomic_uint8_t *)&flags[pos], 1,
                             memory_order_relaxed)) {
      FakeFrame *res = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log, class_id, pos));
      res->real_stack = real_stack;
      res->class_id = class_id;
      // Record that this class has ever been used so GC can skip untouched
      // classes.
      allocated_from_size_class_mask_ |= 1UL << class_id;
      return res;
    }
  }
  CHECK(0 && "Failed to allocate a fake stack frame");
  return 0;
}

// Release a frame previously returned by Allocate. real_stack is accepted
// for signature symmetry with Allocate but is not used here.
ALWAYS_INLINE USED
void FakeStack::Deallocate(FakeFrame *ff, uptr stack_size_log, uptr class_id,
                           uptr real_stack) {
  u8 *base = GetFrame(stack_size_log, class_id, 0);
  u8 *cur = reinterpret_cast<u8 *>(ff);
  CHECK_LE(base, cur);
  CHECK_LT(cur, base + (1UL << stack_size_log));
  // Frame index within the class: each frame is
  // (1 << (kMinStackFrameSizeLog + class_id)) bytes.
  uptr pos = (cur - base) >> (kMinStackFrameSizeLog + class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  CHECK_EQ(flags[pos], 1);  // Must currently be allocated.
  flags[pos] = 0;
}

// If ptr lies inside this FakeStack, return the beginning of its enclosing
// fake frame; otherwise return 0.
uptr FakeStack::AddrIsInFakeStack(uptr ptr) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  // Each size class occupies a contiguous (1 << stack_size_log)-byte region,
  // so the class id is the region index and 'base' is that region's start.
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (1UL << stack_size_log));
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  return base + pos * BytesInSizeClass(class_id);
}

// Called on no-return events (longjmp, throw, etc.); the actual collection
// is deferred until the next Allocate.
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}

// When throw, longjmp or some such happens we don't call OnFree() and
// as the result may leak one or more fake frames, but the good news is that
// we are notified about all such events by HandleNoReturn().
// If we recently had such no-return event we need to collect garbage frames.
// We do it based on their 'real_stack' values -- everything that is lower
// than the current real_stack is garbage.
88NOINLINE void FakeStack::GC(uptr real_stack) { 89 uptr collected = 0; 90 for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) { 91 if (!(allocated_from_size_class_mask_ & (1UL << class_id))) continue; 92 u8 *flags = GetFlags(stack_size_log(), class_id); 93 for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n; 94 i++) { 95 if (flags[i] == 0) continue; // not allocated. 96 FakeFrame *ff = reinterpret_cast<FakeFrame *>( 97 GetFrame(stack_size_log(), class_id, i)); 98 if (ff->real_stack < real_stack) { 99 flags[i] = 0; 100 collected++; 101 } 102 } 103 } 104 needs_gc_ = false; 105} 106 107ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size, uptr real_stack) { 108 AsanThread *t = GetCurrentThread(); 109 if (!t) return real_stack; 110 FakeStack *fs = t->fake_stack(); 111 if (!fs) return real_stack; 112 FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack); 113 uptr ptr = reinterpret_cast<uptr>(ff); 114 PoisonShadow(ptr, size, 0); 115 return ptr; 116} 117 118ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size, uptr real_stack) { 119 if (ptr == real_stack) 120 return; 121 AsanThread *t = GetCurrentThread(); 122 if (!t) return; 123 FakeStack *fs = t->fake_stack(); 124 FakeFrame *ff = reinterpret_cast<FakeFrame *>(ptr); 125 fs->Deallocate(ff, fs->stack_size_log(), class_id, real_stack); 126 PoisonShadow(ptr, size, kAsanStackAfterReturnMagic); 127} 128 129} // namespace __asan 130 131// ---------------------- Interface ---------------- {{{1 132#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id) \ 133 extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr \ 134 __asan_stack_malloc_##class_id(uptr size, uptr real_stack) { \ 135 return __asan::OnMalloc(class_id, size, real_stack); \ 136 } \ 137 extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id( \ 138 uptr ptr, uptr size, uptr real_stack) { \ 139 __asan::OnFree(ptr, class_id, size, real_stack); \ 140 } 141 
// Instantiate the __asan_stack_malloc_N / __asan_stack_free_N interface
// pair for every supported size class (0 through 10).
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)