asan_fake_stack.cc revision 73dc36ea7dd04a2ea52ffae91670913731de209a
1//===-- asan_fake_stack.cc ------------------------------------------------===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of AddressSanitizer, an address sanity checker.
11//
12// FakeStack is used to detect use-after-return bugs.
13//===----------------------------------------------------------------------===//
14#include "asan_allocator.h"
15#include "asan_poisoning.h"
16#include "asan_thread.h"
17
18namespace __asan {
19
20bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) {
21  uptr mem = allocated_size_classes_[size_class];
22  uptr size = ClassMmapSize(size_class);
23  bool res = mem && addr >= mem && addr < mem + size;
24  return res;
25}
26
27uptr FakeStack::AddrIsInFakeStack(uptr addr) {
28  for (uptr size_class = 0; size_class < kNumberOfSizeClasses; size_class++) {
29    if (!AddrIsInSizeClass(addr, size_class)) continue;
30    uptr size_class_first_ptr = allocated_size_classes_[size_class];
31    uptr size = ClassSize(size_class);
32    CHECK_LE(size_class_first_ptr, addr);
33    CHECK_GT(size_class_first_ptr + ClassMmapSize(size_class), addr);
34    return size_class_first_ptr + ((addr - size_class_first_ptr) / size) * size;
35  }
36  return 0;
37}
38
39// We may want to compute this during compilation.
40ALWAYS_INLINE uptr FakeStack::ComputeSizeClass(uptr alloc_size) {
41  uptr rounded_size = RoundUpToPowerOfTwo(alloc_size);
42  uptr log = Log2(rounded_size);
43  CHECK_LE(alloc_size, (1UL << log));
44  CHECK_GT(alloc_size, (1UL << (log-1)));
45  uptr res = log < kMinStackFrameSizeLog ? 0 : log - kMinStackFrameSizeLog;
46  CHECK_LT(res, kNumberOfSizeClasses);
47  CHECK_GE(ClassSize(res), rounded_size);
48  return res;
49}
50
51void FakeFrameFifo::FifoPush(FakeFrame *node) {
52  CHECK(node);
53  node->next = 0;
54  if (first_ == 0 && last_ == 0) {
55    first_ = last_ = node;
56  } else {
57    CHECK(first_);
58    CHECK(last_);
59    last_->next = node;
60    last_ = node;
61  }
62}
63
64FakeFrame *FakeFrameFifo::FifoPop() {
65  CHECK(first_ && last_ && "Exhausted fake stack");
66  FakeFrame *res = 0;
67  if (first_ == last_) {
68    res = first_;
69    first_ = last_ = 0;
70  } else {
71    res = first_;
72    first_ = first_->next;
73  }
74  return res;
75}
76
// Makes the fake stack usable.  |stack_size| caps how much memory each
// size class may mmap (see ClassMmapSize).  AllocateStack falls back to
// the real stack and DeallocateFrame CHECK-fails while alive_ is false.
void FakeStack::Init(uptr stack_size) {
  stack_size_ = stack_size;
  alive_ = true;
}
81
82void FakeStack::Cleanup() {
83  alive_ = false;
84  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
85    uptr mem = allocated_size_classes_[i];
86    if (mem) {
87      PoisonShadow(mem, ClassMmapSize(i), 0);
88      allocated_size_classes_[i] = 0;
89      UnmapOrDie((void*)mem, ClassMmapSize(i));
90    }
91  }
92}
93
94uptr FakeStack::ClassMmapSize(uptr size_class) {
95  // Limit allocation size to ClassSize * MaxDepth when running with unlimited
96  // stack.
97  return RoundUpTo(Min(ClassSize(size_class) * kMaxRecursionDepth, stack_size_),
98                   GetPageSizeCached());
99}
100
101void FakeStack::AllocateOneSizeClass(uptr size_class) {
102  CHECK(ClassMmapSize(size_class) >= GetPageSizeCached());
103  uptr new_mem = (uptr)MmapOrDie(
104      ClassMmapSize(size_class), __FUNCTION__);
105  if (0) {
106    Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n",
107           GetCurrentThread()->tid(),
108           size_class, new_mem, new_mem + ClassMmapSize(size_class),
109           ClassMmapSize(size_class));
110  }
111  uptr i;
112  uptr size = ClassSize(size_class);
113  for (i = 0; i + size <= ClassMmapSize(size_class); i += size) {
114    size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i));
115  }
116  CHECK_LE(i, ClassMmapSize(size_class));
117  allocated_size_classes_[size_class] = new_mem;
118}
119
// Hands out a fake frame of at least |size| bytes for the real frame at
// |real_stack|, or returns |real_stack| itself when the fake stack is
// unavailable.  Also retires fake frames whose real frames are gone.
ALWAYS_INLINE uptr FakeStack::AllocateStack(uptr size, uptr real_stack) {
  if (!alive_) return real_stack;  // Torn down (Cleanup); use real stack.
  CHECK(size <= kMaxStackMallocSize && size > 1);
  uptr size_class = ComputeSizeClass(size);
  if (!allocated_size_classes_[size_class]) {
    AllocateOneSizeClass(size_class);  // Lazily mmap this size class.
  }
  FakeFrame *fake_frame = size_classes_[size_class].FifoPop();
  CHECK(fake_frame);
  fake_frame->size_minus_one = size - 1;
  fake_frame->real_stack = real_stack;
  // Garbage-collect: any frame on call_stack_ whose real_stack is <= ours
  // belongs to a function that has already returned without reaching
  // __asan_stack_free (presumably via longjmp or the like -- stacks grow
  // down, so lower real_stack means a deeper, already-exited frame).
  while (FakeFrame *top = call_stack_.top()) {
    if (top->real_stack > real_stack) break;
    call_stack_.LifoPop();
    DeallocateFrame(top);
  }
  call_stack_.LifoPush(fake_frame);
  uptr ptr = (uptr)fake_frame;
  PoisonShadow(ptr, size, 0);  // Unpoison so the frame is usable as stack.
  return ptr;
}
141
142ALWAYS_INLINE void FakeStack::DeallocateFrame(FakeFrame *fake_frame) {
143  CHECK(alive_);
144  uptr size = static_cast<uptr>(fake_frame->size_minus_one + 1);
145  uptr size_class = ComputeSizeClass(size);
146  CHECK(allocated_size_classes_[size_class]);
147  uptr ptr = (uptr)fake_frame;
148  CHECK(AddrIsInSizeClass(ptr, size_class));
149  CHECK(AddrIsInSizeClass(ptr + size - 1, size_class));
150  size_classes_[size_class].FifoPush(fake_frame);
151}
152
// Called when the real frame paired with fake frame |ptr| returns:
// re-poisons [ptr, ptr+size) so later accesses through dangling pointers
// are reported as stack-use-after-return.
// NOTE(review): nothing in this file writes magic/descr; presumably the
// instrumented function epilogue sets kRetiredStackFrameMagic before
// calling __asan_stack_free -- confirm against the instrumentation pass.
ALWAYS_INLINE void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) {
  FakeFrame *fake_frame = (FakeFrame*)ptr;
  CHECK_EQ(fake_frame->magic, kRetiredStackFrameMagic);
  CHECK_NE(fake_frame->descr, 0);
  CHECK_EQ(fake_frame->size_minus_one, size - 1);
  PoisonShadow(ptr, size, kAsanStackAfterReturnMagic);
}
160
161}  // namespace __asan
162
163// ---------------------- Interface ---------------- {{{1
164using namespace __asan;  // NOLINT
165
166uptr __asan_stack_malloc(uptr size, uptr real_stack) {
167  if (!flags()->use_fake_stack) return real_stack;
168  AsanThread *t = GetCurrentThread();
169  if (!t) {
170    // TSD is gone, use the real stack.
171    return real_stack;
172  }
173  t->LazyInitFakeStack();
174  uptr ptr = t->fake_stack()->AllocateStack(size, real_stack);
175  // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack);
176  return ptr;
177}
178
179void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) {
180  if (!flags()->use_fake_stack) return;
181  if (ptr != real_stack) {
182    FakeStack::OnFree(ptr, size, real_stack);
183  }
184}
185