// asan_fake_stack.cc revision def1be9b7ef4091ce465c0fbfb26cdb52128ade8
//===-- asan_fake_stack.cc ------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//
#include "asan_allocator.h"
#include "asan_thread.h"

namespace __asan {

19FakeStack::FakeStack() {
20  CHECK(REAL(memset) != 0);
21  REAL(memset)(this, 0, sizeof(*this));
22}
23
24bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) {
25  uptr mem = allocated_size_classes_[size_class];
26  uptr size = ClassMmapSize(size_class);
27  bool res = mem && addr >= mem && addr < mem + size;
28  return res;
29}
30
31uptr FakeStack::AddrIsInFakeStack(uptr addr) {
32  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
33    if (AddrIsInSizeClass(addr, i)) return allocated_size_classes_[i];
34  }
35  return 0;
36}
37
38// We may want to compute this during compilation.
39inline uptr FakeStack::ComputeSizeClass(uptr alloc_size) {
40  uptr rounded_size = RoundUpToPowerOfTwo(alloc_size);
41  uptr log = Log2(rounded_size);
42  CHECK(alloc_size <= (1UL << log));
43  if (!(alloc_size > (1UL << (log-1)))) {
44    Printf("alloc_size %zu log %zu\n", alloc_size, log);
45  }
46  CHECK(alloc_size > (1UL << (log-1)));
47  uptr res = log < kMinStackFrameSizeLog ? 0 : log - kMinStackFrameSizeLog;
48  CHECK(res < kNumberOfSizeClasses);
49  CHECK(ClassSize(res) >= rounded_size);
50  return res;
51}
52
53void FakeFrameFifo::FifoPush(FakeFrame *node) {
54  CHECK(node);
55  node->next = 0;
56  if (first_ == 0 && last_ == 0) {
57    first_ = last_ = node;
58  } else {
59    CHECK(first_);
60    CHECK(last_);
61    last_->next = node;
62    last_ = node;
63  }
64}
65
66FakeFrame *FakeFrameFifo::FifoPop() {
67  CHECK(first_ && last_ && "Exhausted fake stack");
68  FakeFrame *res = 0;
69  if (first_ == last_) {
70    res = first_;
71    first_ = last_ = 0;
72  } else {
73    res = first_;
74    first_ = first_->next;
75  }
76  return res;
77}
78
// Activates the fake stack for a thread whose real stack is 'stack_size'
// bytes. Note: stack_size_ is stored before alive_ is set, so a reader
// that sees alive_ == true also sees a valid stack size.
void FakeStack::Init(uptr stack_size) {
  stack_size_ = stack_size;
  alive_ = true;
}

84void FakeStack::Cleanup() {
85  alive_ = false;
86  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
87    uptr mem = allocated_size_classes_[i];
88    if (mem) {
89      PoisonShadow(mem, ClassMmapSize(i), 0);
90      allocated_size_classes_[i] = 0;
91      UnmapOrDie((void*)mem, ClassMmapSize(i));
92    }
93  }
94}
95
96uptr FakeStack::ClassMmapSize(uptr size_class) {
97  return RoundUpToPowerOfTwo(stack_size_);
98}
99
100void FakeStack::AllocateOneSizeClass(uptr size_class) {
101  CHECK(ClassMmapSize(size_class) >= GetPageSizeCached());
102  uptr new_mem = (uptr)MmapOrDie(
103      ClassMmapSize(size_class), __FUNCTION__);
104  // Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n",
105  //       GetCurrentThread()->tid(),
106  //       size_class, new_mem, new_mem + ClassMmapSize(size_class),
107  //       ClassMmapSize(size_class));
108  uptr i;
109  for (i = 0; i < ClassMmapSize(size_class);
110       i += ClassSize(size_class)) {
111    size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i));
112  }
113  CHECK(i == ClassMmapSize(size_class));
114  allocated_size_classes_[size_class] = new_mem;
115}
116
117uptr FakeStack::AllocateStack(uptr size, uptr real_stack) {
118  if (!alive_) return real_stack;
119  CHECK(size <= kMaxStackMallocSize && size > 1);
120  uptr size_class = ComputeSizeClass(size);
121  if (!allocated_size_classes_[size_class]) {
122    AllocateOneSizeClass(size_class);
123  }
124  FakeFrame *fake_frame = size_classes_[size_class].FifoPop();
125  CHECK(fake_frame);
126  fake_frame->size_minus_one = size - 1;
127  fake_frame->real_stack = real_stack;
128  while (FakeFrame *top = call_stack_.top()) {
129    if (top->real_stack > real_stack) break;
130    call_stack_.LifoPop();
131    DeallocateFrame(top);
132  }
133  call_stack_.LifoPush(fake_frame);
134  uptr ptr = (uptr)fake_frame;
135  PoisonShadow(ptr, size, 0);
136  return ptr;
137}
138
139void FakeStack::DeallocateFrame(FakeFrame *fake_frame) {
140  CHECK(alive_);
141  uptr size = fake_frame->size_minus_one + 1;
142  uptr size_class = ComputeSizeClass(size);
143  CHECK(allocated_size_classes_[size_class]);
144  uptr ptr = (uptr)fake_frame;
145  CHECK(AddrIsInSizeClass(ptr, size_class));
146  CHECK(AddrIsInSizeClass(ptr + size - 1, size_class));
147  size_classes_[size_class].FifoPush(fake_frame);
148}
149
150void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) {
151  FakeFrame *fake_frame = (FakeFrame*)ptr;
152  CHECK(fake_frame->magic = kRetiredStackFrameMagic);
153  CHECK(fake_frame->descr != 0);
154  CHECK(fake_frame->size_minus_one == size - 1);
155  PoisonShadow(ptr, size, kAsanStackAfterReturnMagic);
156}
157
}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;  // NOLINT

163uptr __asan_stack_malloc(uptr size, uptr real_stack) {
164  if (!flags()->use_fake_stack) return real_stack;
165  AsanThread *t = GetCurrentThread();
166  if (!t) {
167    // TSD is gone, use the real stack.
168    return real_stack;
169  }
170  uptr ptr = t->fake_stack().AllocateStack(size, real_stack);
171  // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack);
172  return ptr;
173}
174
175void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) {
176  if (!flags()->use_fake_stack) return;
177  if (ptr != real_stack) {
178    FakeStack::OnFree(ptr, size, real_stack);
179  }
180}
181