asan_fake_stack.cc revision 7e8434940a1fe7dce531d4c458ccd714da48f609
1//===-- asan_fake_stack.cc ------------------------------------------------===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of AddressSanitizer, an address sanity checker.
11//
12// FakeStack is used to detect use-after-return bugs.
13//===----------------------------------------------------------------------===//
14#include "asan_allocator.h"
15#include "asan_poisoning.h"
16#include "asan_thread.h"
17
18namespace __asan {
19
20FakeStack::FakeStack() {
21  CHECK(REAL(memset) != 0);
22  REAL(memset)(this, 0, sizeof(*this));
23}
24
25bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) {
26  uptr mem = allocated_size_classes_[size_class];
27  uptr size = ClassMmapSize(size_class);
28  bool res = mem && addr >= mem && addr < mem + size;
29  return res;
30}
31
32uptr FakeStack::AddrIsInFakeStack(uptr addr) {
33  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
34    if (AddrIsInSizeClass(addr, i)) return allocated_size_classes_[i];
35  }
36  return 0;
37}
38
39// We may want to compute this during compilation.
40inline uptr FakeStack::ComputeSizeClass(uptr alloc_size) {
41  uptr rounded_size = RoundUpToPowerOfTwo(alloc_size);
42  uptr log = Log2(rounded_size);
43  CHECK(alloc_size <= (1UL << log));
44  if (!(alloc_size > (1UL << (log-1)))) {
45    Printf("alloc_size %zu log %zu\n", alloc_size, log);
46  }
47  CHECK(alloc_size > (1UL << (log-1)));
48  uptr res = log < kMinStackFrameSizeLog ? 0 : log - kMinStackFrameSizeLog;
49  CHECK(res < kNumberOfSizeClasses);
50  CHECK(ClassSize(res) >= rounded_size);
51  return res;
52}
53
54void FakeFrameFifo::FifoPush(FakeFrame *node) {
55  CHECK(node);
56  node->next = 0;
57  if (first_ == 0 && last_ == 0) {
58    first_ = last_ = node;
59  } else {
60    CHECK(first_);
61    CHECK(last_);
62    last_->next = node;
63    last_ = node;
64  }
65}
66
67FakeFrame *FakeFrameFifo::FifoPop() {
68  CHECK(first_ && last_ && "Exhausted fake stack");
69  FakeFrame *res = 0;
70  if (first_ == last_) {
71    res = first_;
72    first_ = last_ = 0;
73  } else {
74    res = first_;
75    first_ = first_->next;
76  }
77  return res;
78}
79
// Activates the fake stack for a thread whose real stack is |stack_size|
// bytes; stack_size_ determines how much memory each size class mmaps.
void FakeStack::Init(uptr stack_size) {
  stack_size_ = stack_size;
  alive_ = true;  // AllocateStack() is a no-op until this is set.
}
84
85void FakeStack::Cleanup() {
86  alive_ = false;
87  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
88    uptr mem = allocated_size_classes_[i];
89    if (mem) {
90      PoisonShadow(mem, ClassMmapSize(i), 0);
91      allocated_size_classes_[i] = 0;
92      UnmapOrDie((void*)mem, ClassMmapSize(i));
93    }
94  }
95}
96
// Size of the mmap-ed region backing one size class. Every class currently
// maps the same amount (the real stack size rounded up to a power of two),
// so |size_class| is deliberately unused.
uptr FakeStack::ClassMmapSize(uptr size_class) {
  return RoundUpToPowerOfTwo(stack_size_);
}
100
101void FakeStack::AllocateOneSizeClass(uptr size_class) {
102  CHECK(ClassMmapSize(size_class) >= GetPageSizeCached());
103  uptr new_mem = (uptr)MmapOrDie(
104      ClassMmapSize(size_class), __FUNCTION__);
105  // Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n",
106  //       GetCurrentThread()->tid(),
107  //       size_class, new_mem, new_mem + ClassMmapSize(size_class),
108  //       ClassMmapSize(size_class));
109  uptr i;
110  for (i = 0; i < ClassMmapSize(size_class);
111       i += ClassSize(size_class)) {
112    size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i));
113  }
114  CHECK(i == ClassMmapSize(size_class));
115  allocated_size_classes_[size_class] = new_mem;
116}
117
118uptr FakeStack::AllocateStack(uptr size, uptr real_stack) {
119  if (!alive_) return real_stack;
120  CHECK(size <= kMaxStackMallocSize && size > 1);
121  uptr size_class = ComputeSizeClass(size);
122  if (!allocated_size_classes_[size_class]) {
123    AllocateOneSizeClass(size_class);
124  }
125  FakeFrame *fake_frame = size_classes_[size_class].FifoPop();
126  CHECK(fake_frame);
127  fake_frame->size_minus_one = size - 1;
128  fake_frame->real_stack = real_stack;
129  while (FakeFrame *top = call_stack_.top()) {
130    if (top->real_stack > real_stack) break;
131    call_stack_.LifoPop();
132    DeallocateFrame(top);
133  }
134  call_stack_.LifoPush(fake_frame);
135  uptr ptr = (uptr)fake_frame;
136  PoisonShadow(ptr, size, 0);
137  return ptr;
138}
139
140void FakeStack::DeallocateFrame(FakeFrame *fake_frame) {
141  CHECK(alive_);
142  uptr size = fake_frame->size_minus_one + 1;
143  uptr size_class = ComputeSizeClass(size);
144  CHECK(allocated_size_classes_[size_class]);
145  uptr ptr = (uptr)fake_frame;
146  CHECK(AddrIsInSizeClass(ptr, size_class));
147  CHECK(AddrIsInSizeClass(ptr + size - 1, size_class));
148  size_classes_[size_class].FifoPush(fake_frame);
149}
150
151void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) {
152  FakeFrame *fake_frame = (FakeFrame*)ptr;
153  CHECK(fake_frame->magic = kRetiredStackFrameMagic);
154  CHECK(fake_frame->descr != 0);
155  CHECK(fake_frame->size_minus_one == size - 1);
156  PoisonShadow(ptr, size, kAsanStackAfterReturnMagic);
157}
158
159}  // namespace __asan
160
161// ---------------------- Interface ---------------- {{{1
162using namespace __asan;  // NOLINT
163
164uptr __asan_stack_malloc(uptr size, uptr real_stack) {
165  if (!flags()->use_fake_stack) return real_stack;
166  AsanThread *t = GetCurrentThread();
167  if (!t) {
168    // TSD is gone, use the real stack.
169    return real_stack;
170  }
171  uptr ptr = t->fake_stack().AllocateStack(size, real_stack);
172  // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack);
173  return ptr;
174}
175
176void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) {
177  if (!flags()->use_fake_stack) return;
178  if (ptr != real_stack) {
179    FakeStack::OnFree(ptr, size, real_stack);
180  }
181}
182