asan_fake_stack.cc revision 89de457bd3ec40d38bc7860f88f1d4da473eacc4
//===-- asan_fake_stack.cc ------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//
#include "asan_allocator.h"
#include "asan_poisoning.h"
#include "asan_thread.h"

namespace __asan {

// Poison the shadow of the whole FakeStack region (everything covered by
// RequiredSize(), starting at `this`) with the given magic value.
void FakeStack::PoisonAll(u8 magic) {
  PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
               magic);
}

// Grab a free frame of size class `class_id` and stamp it with the caller's
// real stack pointer (`real_stack`) and its class id.  If a no-return event
// (longjmp/throw) was observed since the last allocation, garbage-collect
// leaked frames first.  CHECK-fails if every frame of this class is taken
// even after GC.
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  // Per-class rotating hint: start scanning where the previous allocation
  // left off instead of rescanning from slot 0 every time.
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // Cheap non-atomic pre-check; the atomic exchange below is what actually
    // claims the slot.
    if (flags[pos]) continue;
    // FIXME: this does not have to be thread-safe, just async-signal-safe.
    if (0 == atomic_exchange((atomic_uint8_t *)&flags[pos], 1,
                             memory_order_relaxed)) {
      // Won the slot: materialize the frame header inside it.
      FakeFrame *res = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log, class_id, pos));
      res->real_stack = real_stack;
      res->class_id = class_id;
      // Record that this class has ever been used so GC can skip untouched
      // classes entirely.
      allocated_from_size_class_mask_ |= 1UL << class_id;
      return res;
    }
  }
  CHECK(0 && "Failed to allocate a fake stack frame");
  return 0;
}

// Release the frame `ff` of size class `class_id` back to the free pool.
// The slot index is recovered from the frame's byte offset within its class
// region (each class-`class_id` frame is 1 << (kMinStackFrameSizeLog +
// class_id) bytes, as implied by the shift below).
void FakeStack::Deallocate(FakeFrame *ff, uptr stack_size_log, uptr class_id,
                           uptr real_stack) {
  u8 *base = GetFrame(stack_size_log, class_id, 0);
  u8 *cur = reinterpret_cast<u8 *>(ff);
  CHECK_LE(base, cur);
  CHECK_LT(cur, base + (1UL << stack_size_log));
  uptr pos = (cur - base) >> (kMinStackFrameSizeLog + class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  CHECK_EQ(flags[pos], 1);
  // Plain (non-atomic) store, unlike the atomic_exchange used to claim the
  // slot in Allocate — presumably a frame is only freed by its owner; see the
  // async-signal-safety FIXME there.
  flags[pos] = 0;
}

// If `ptr` points into this FakeStack's frame area, return the beginning of
// the frame containing it; otherwise return 0.
uptr FakeStack::AddrIsInFakeStack(uptr ptr) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  // Each size class occupies a contiguous (1 << stack_size_log)-byte region,
  // so the class is the region index and `base` is that region's start.
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (1UL << stack_size_log));
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  return base + pos * BytesInSizeClass(class_id);
}

// A no-return event (longjmp, throw, etc.) is in flight: frames on the way
// up will never be OnFree'd.  Just set a flag; the actual reclaim is
// deferred to the next Allocate (which calls GC).
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}

// When throw, longjmp or some such happens we don't call OnFree() and
// as the result may leak one or more fake frames, but the good news is that
// we are notified about all such events by HandleNoReturn().
// If we recently had such no-return event we need to collect garbage frames.
// We do it based on their 'real_stack' values -- everything that is lower
// than the current real_stack is garbage.
86NOINLINE void FakeStack::GC(uptr real_stack) { 87 uptr collected = 0; 88 for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) { 89 if (!(allocated_from_size_class_mask_ & (1UL << class_id))) continue; 90 u8 *flags = GetFlags(stack_size_log(), class_id); 91 for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n; 92 i++) { 93 if (flags[i] == 0) continue; // not allocated. 94 FakeFrame *ff = reinterpret_cast<FakeFrame *>( 95 GetFrame(stack_size_log(), class_id, i)); 96 if (ff->real_stack < real_stack) { 97 flags[i] = 0; 98 collected++; 99 } 100 } 101 } 102 needs_gc_ = false; 103} 104 105ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size, uptr real_stack) { 106 AsanThread *t = GetCurrentThread(); 107 if (!t) return real_stack; 108 FakeStack *fs = t->fake_stack(); 109 if (!fs) return real_stack; 110 FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack); 111 uptr ptr = reinterpret_cast<uptr>(ff); 112 PoisonShadow(ptr, size, 0); 113 return ptr; 114} 115 116ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size, uptr real_stack) { 117 if (ptr == real_stack) 118 return; 119 AsanThread *t = GetCurrentThread(); 120 if (!t) return; 121 FakeStack *fs = t->fake_stack(); 122 FakeFrame *ff = reinterpret_cast<FakeFrame *>(ptr); 123 fs->Deallocate(ff, fs->stack_size_log(), class_id, real_stack); 124 PoisonShadow(ptr, size, kAsanStackAfterReturnMagic); 125} 126 127} // namespace __asan 128 129// ---------------------- Interface ---------------- {{{1 130#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id) \ 131 extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr \ 132 __asan_stack_malloc_##class_id(uptr size, uptr real_stack) { \ 133 return __asan::OnMalloc(class_id, size, real_stack); \ 134 } \ 135 extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id( \ 136 uptr ptr, uptr size, uptr real_stack) { \ 137 __asan::OnFree(ptr, class_id, size, real_stack); \ 138 } 139 
140DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0) 141DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1) 142DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2) 143DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3) 144DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4) 145DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5) 146DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6) 147DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7) 148DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8) 149DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9) 150DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10) 151