// asan_fake_stack.cc revision 5e97ba38b00eb843a55189bb913b445cbe620894
1//===-- asan_fake_stack.cc ------------------------------------------------===// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file is a part of AddressSanitizer, an address sanity checker. 11// 12// FakeStack is used to detect use-after-return bugs. 13//===----------------------------------------------------------------------===// 14#include "asan_allocator.h" 15#include "asan_poisoning.h" 16#include "asan_thread.h" 17 18namespace __asan { 19 20FakeStack::FakeStack() { 21 CHECK(REAL(memset)); 22 REAL(memset)(this, 0, sizeof(*this)); 23} 24 25bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) { 26 uptr mem = allocated_size_classes_[size_class]; 27 uptr size = ClassMmapSize(size_class); 28 bool res = mem && addr >= mem && addr < mem + size; 29 return res; 30} 31 32uptr FakeStack::AddrIsInFakeStack(uptr addr) { 33 for (uptr size_class = 0; size_class < kNumberOfSizeClasses; size_class++) { 34 if (!AddrIsInSizeClass(addr, size_class)) continue; 35 uptr size_class_first_ptr = allocated_size_classes_[size_class]; 36 uptr size = ClassSize(size_class); 37 CHECK_LE(size_class_first_ptr, addr); 38 CHECK_GT(size_class_first_ptr + ClassMmapSize(size_class), addr); 39 return size_class_first_ptr + ((addr - size_class_first_ptr) / size) * size; 40 } 41 return 0; 42} 43 44// We may want to compute this during compilation. 45ALWAYS_INLINE uptr FakeStack::ComputeSizeClass(uptr alloc_size) { 46 uptr rounded_size = RoundUpToPowerOfTwo(alloc_size); 47 uptr log = Log2(rounded_size); 48 CHECK_LE(alloc_size, (1UL << log)); 49 CHECK_GT(alloc_size, (1UL << (log-1))); 50 uptr res = log < kMinStackFrameSizeLog ? 
0 : log - kMinStackFrameSizeLog; 51 CHECK_LT(res, kNumberOfSizeClasses); 52 CHECK_GE(ClassSize(res), rounded_size); 53 return res; 54} 55 56void FakeFrameFifo::FifoPush(FakeFrame *node) { 57 CHECK(node); 58 node->next = 0; 59 if (first_ == 0 && last_ == 0) { 60 first_ = last_ = node; 61 } else { 62 CHECK(first_); 63 CHECK(last_); 64 last_->next = node; 65 last_ = node; 66 } 67} 68 69FakeFrame *FakeFrameFifo::FifoPop() { 70 CHECK(first_ && last_ && "Exhausted fake stack"); 71 FakeFrame *res = 0; 72 if (first_ == last_) { 73 res = first_; 74 first_ = last_ = 0; 75 } else { 76 res = first_; 77 first_ = first_->next; 78 } 79 return res; 80} 81 82void FakeStack::Init(uptr stack_size) { 83 stack_size_ = stack_size; 84 alive_ = true; 85} 86 87void FakeStack::Cleanup() { 88 alive_ = false; 89 for (uptr i = 0; i < kNumberOfSizeClasses; i++) { 90 uptr mem = allocated_size_classes_[i]; 91 if (mem) { 92 PoisonShadow(mem, ClassMmapSize(i), 0); 93 allocated_size_classes_[i] = 0; 94 UnmapOrDie((void*)mem, ClassMmapSize(i)); 95 } 96 } 97} 98 99uptr FakeStack::ClassMmapSize(uptr size_class) { 100 return RoundUpToPowerOfTwo(stack_size_); 101} 102 103void FakeStack::AllocateOneSizeClass(uptr size_class) { 104 CHECK(ClassMmapSize(size_class) >= GetPageSizeCached()); 105 uptr new_mem = (uptr)MmapOrDie( 106 ClassMmapSize(size_class), __FUNCTION__); 107 // Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n", 108 // GetCurrentThread()->tid(), 109 // size_class, new_mem, new_mem + ClassMmapSize(size_class), 110 // ClassMmapSize(size_class)); 111 uptr i; 112 for (i = 0; i < ClassMmapSize(size_class); 113 i += ClassSize(size_class)) { 114 size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i)); 115 } 116 CHECK(i == ClassMmapSize(size_class)); 117 allocated_size_classes_[size_class] = new_mem; 118} 119 120ALWAYS_INLINE uptr FakeStack::AllocateStack(uptr size, uptr real_stack) { 121 if (!alive_) return real_stack; 122 CHECK(size <= kMaxStackMallocSize && size > 1); 123 uptr size_class = 
ComputeSizeClass(size); 124 if (!allocated_size_classes_[size_class]) { 125 AllocateOneSizeClass(size_class); 126 } 127 FakeFrame *fake_frame = size_classes_[size_class].FifoPop(); 128 CHECK(fake_frame); 129 fake_frame->size_minus_one = size - 1; 130 fake_frame->real_stack = real_stack; 131 while (FakeFrame *top = call_stack_.top()) { 132 if (top->real_stack > real_stack) break; 133 call_stack_.LifoPop(); 134 DeallocateFrame(top); 135 } 136 call_stack_.LifoPush(fake_frame); 137 uptr ptr = (uptr)fake_frame; 138 PoisonShadow(ptr, size, 0); 139 return ptr; 140} 141 142ALWAYS_INLINE void FakeStack::DeallocateFrame(FakeFrame *fake_frame) { 143 CHECK(alive_); 144 uptr size = static_cast<uptr>(fake_frame->size_minus_one + 1); 145 uptr size_class = ComputeSizeClass(size); 146 CHECK(allocated_size_classes_[size_class]); 147 uptr ptr = (uptr)fake_frame; 148 CHECK(AddrIsInSizeClass(ptr, size_class)); 149 CHECK(AddrIsInSizeClass(ptr + size - 1, size_class)); 150 size_classes_[size_class].FifoPush(fake_frame); 151} 152 153ALWAYS_INLINE void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) { 154 FakeFrame *fake_frame = (FakeFrame*)ptr; 155 CHECK_EQ(fake_frame->magic, kRetiredStackFrameMagic); 156 CHECK_NE(fake_frame->descr, 0); 157 CHECK_EQ(fake_frame->size_minus_one, size - 1); 158 PoisonShadow(ptr, size, kAsanStackAfterReturnMagic); 159} 160 161} // namespace __asan 162 163// ---------------------- Interface ---------------- {{{1 164using namespace __asan; // NOLINT 165 166uptr __asan_stack_malloc(uptr size, uptr real_stack) { 167 if (!flags()->use_fake_stack) return real_stack; 168 AsanThread *t = GetCurrentThread(); 169 if (!t) { 170 // TSD is gone, use the real stack. 
171 return real_stack; 172 } 173 uptr ptr = t->fake_stack().AllocateStack(size, real_stack); 174 // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack); 175 return ptr; 176} 177 178void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) { 179 if (!flags()->use_fake_stack) return; 180 if (ptr != real_stack) { 181 FakeStack::OnFree(ptr, size, real_stack); 182 } 183} 184