asan_allocator.h revision e2399c731b6599f7d608bf5dbbd0b92e1d71fd31
//===-- asan_allocator.h ----------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// ASan-private header for asan_allocator.cc.
//===----------------------------------------------------------------------===//

#ifndef ASAN_ALLOCATOR_H
#define ASAN_ALLOCATOR_H

#include "asan_internal.h"
#include "asan_interceptors.h"
#include "sanitizer_common/sanitizer_list.h"

// We are in the process of transitioning from the old allocator (version 1)
// to a new one (version 2). The change is quite intrusive, so both allocators
// will co-exist in the source base for a while. The actual allocator is chosen
// at build time by redefining this macro.
#ifndef ASAN_ALLOCATOR_VERSION
# if ASAN_LINUX && !ASAN_ANDROID
#  define ASAN_ALLOCATOR_VERSION 2
# else
#  define ASAN_ALLOCATOR_VERSION 1
# endif
#endif  // ASAN_ALLOCATOR_VERSION
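
// Example (illustrative; exact flags depend on how the runtime is built):
// to force the old allocator on a platform that defaults to version 2, define
// the macro on the compiler command line when building asan_allocator.cc, e.g.
//
//   clang++ -DASAN_ALLOCATOR_VERSION=1 -c asan_allocator.cc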

namespace __asan {

enum AllocType {
  FROM_MALLOC = 1,  // Memory block came from malloc, calloc, realloc, etc.
  FROM_NEW = 2,     // Memory block came from operator new.
  FROM_NEW_BR = 3   // Memory block came from operator new [].
};
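
// Illustrative note (assumed behavior, subject to runtime flags): the
// AllocType recorded at allocation time can be checked against the one passed
// to asan_free() to catch alloc/dealloc mismatches, e.g.
//
//   int *p = new int[4];  // Recorded as FROM_NEW_BR by the new[] interceptor.
//   free(p);              // free() passes FROM_MALLOC: potential mismatch.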

static const uptr kNumberOfSizeClasses = 255;
struct AsanChunk;

class AsanChunkView {
 public:
  explicit AsanChunkView(AsanChunk *chunk) : chunk_(chunk) {}
  bool IsValid() { return chunk_ != 0; }
  uptr Beg();       // first byte of user memory.
  uptr End();       // last byte of user memory.
  uptr UsedSize();  // size requested by the user.
  uptr AllocTid();
  uptr FreeTid();
  void GetAllocStack(StackTrace *stack);
  void GetFreeStack(StackTrace *stack);
  bool AddrIsInside(uptr addr, uptr access_size, uptr *offset) {
    if (addr >= Beg() && (addr + access_size) <= End()) {
      *offset = addr - Beg();
      return true;
    }
    return false;
  }
  bool AddrIsAtLeft(uptr addr, uptr access_size, uptr *offset) {
    (void)access_size;
    if (addr < Beg()) {
      *offset = Beg() - addr;
      return true;
    }
    return false;
  }
  bool AddrIsAtRight(uptr addr, uptr access_size, uptr *offset) {
    if (addr + access_size >= End()) {
      if (addr <= End())
        *offset = 0;
      else
        *offset = addr - End();
      return true;
    }
    return false;
  }

 private:
  AsanChunk *const chunk_;
};

AsanChunkView FindHeapChunkByAddress(uptr address);
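
// Illustrative sketch (assumed usage, in the spirit of the error-reporting
// code): classify a faulting address relative to the heap chunk it hits.
//
//   AsanChunkView chunk = FindHeapChunkByAddress(addr);
//   if (chunk.IsValid()) {
//     uptr offset = 0;
//     if (chunk.AddrIsInside(addr, access_size, &offset)) {
//       // addr is 'offset' bytes into the user region [Beg(), End()].
//     } else if (chunk.AddrIsAtLeft(addr, access_size, &offset)) {
//       // Underflow: addr is 'offset' bytes to the left of the chunk.
//     } else if (chunk.AddrIsAtRight(addr, access_size, &offset)) {
//       // Overflow: addr is 'offset' bytes past the end of the chunk.
//     }
//   }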

// List of AsanChunks with total size.
class AsanChunkFifoList: public IntrusiveList<AsanChunk> {
 public:
  explicit AsanChunkFifoList(LinkerInitialized) { }
  AsanChunkFifoList() { clear(); }
  void Push(AsanChunk *n);
  void PushList(AsanChunkFifoList *q);
  AsanChunk *Pop();
  uptr size() { return size_; }
  void clear() {
    IntrusiveList<AsanChunk>::clear();
    size_ = 0;
  }
 private:
  uptr size_;
};
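
// Illustrative sketch (assumed usage): freed chunks are kept in a FIFO
// quarantine and the oldest ones are reclaimed once the total size exceeds a
// limit (kQuarantineLimit below is a placeholder, not a real constant).
//
//   AsanChunkFifoList quarantine;
//   quarantine.Push(freed_chunk);            // size() grows by chunk size.
//   while (quarantine.size() > kQuarantineLimit) {
//     AsanChunk *oldest = quarantine.Pop();  // Oldest leaves quarantine.
//     // ... return 'oldest' to the underlying allocator ...
//   }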

struct AsanThreadLocalMallocStorage {
  explicit AsanThreadLocalMallocStorage(LinkerInitialized x)
#if ASAN_ALLOCATOR_VERSION == 1
      : quarantine_(x)
#endif
      { }
  AsanThreadLocalMallocStorage() {
    CHECK(REAL(memset));
    REAL(memset)(this, 0, sizeof(AsanThreadLocalMallocStorage));
  }

#if ASAN_ALLOCATOR_VERSION == 1
  AsanChunkFifoList quarantine_;
  AsanChunk *free_lists_[kNumberOfSizeClasses];
#else
  uptr quarantine_cache[16];
  uptr allocator2_cache[96 * (512 * 8 + 16)];  // Opaque.
#endif
  void CommitBack();
};

// Fake stack frame contains local variables of one function.
// This struct should fit into a stack redzone (32 bytes).
struct FakeFrame {
  uptr magic;  // Modified by the instrumented code.
  uptr descr;  // Modified by the instrumented code.
  FakeFrame *next;
  u64 real_stack     : 48;
  u64 size_minus_one : 16;
};

struct FakeFrameFifo {
 public:
  void FifoPush(FakeFrame *node);
  FakeFrame *FifoPop();
 private:
  FakeFrame *first_, *last_;
};

class FakeFrameLifo {
 public:
  void LifoPush(FakeFrame *node) {
    node->next = top_;
    top_ = node;
  }
  void LifoPop() {
    CHECK(top_);
    top_ = top_->next;
  }
  FakeFrame *top() { return top_; }
 private:
  FakeFrame *top_;
};
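
// Illustrative sketch (assumed usage): the list of live fake frames mirrors
// the call/return order of the instrumented thread.
//
//   FakeFrameLifo call_stack;
//   call_stack.LifoPush(frame);        // On function entry.
//   FakeFrame *current = call_stack.top();
//   call_stack.LifoPop();              // On function exit.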

// For each thread we create a fake stack and place stack objects on this fake
// stack instead of the real stack. The fake stack is not really a stack but
// a fast malloc-like allocator, so when a function exits the fake stack
// is not popped but remains there for quite some time until it gets used
// again. So, we poison the objects on the fake stack when the function
// returns. This helps us find use-after-return bugs.
// We cannot rely on __asan_stack_free being called on every function exit,
// so we maintain a LIFO list of all current fake frames and update it on every
// call to __asan_stack_malloc.
class FakeStack {
 public:
  FakeStack();
  explicit FakeStack(LinkerInitialized) {}
  void Init(uptr stack_size);
  void StopUsingFakeStack() { alive_ = false; }
  void Cleanup();
  uptr AllocateStack(uptr size, uptr real_stack);
  static void OnFree(uptr ptr, uptr size, uptr real_stack);
  // Return the bottom of the mapped region containing addr,
  // or 0 if addr does not belong to the fake stack.
  uptr AddrIsInFakeStack(uptr addr);
  uptr StackSize() { return stack_size_; }

 private:
  static const uptr kMinStackFrameSizeLog = 9;  // Min frame is 512B.
  static const uptr kMaxStackFrameSizeLog = 16;  // Max stack frame is 64K.
  static const uptr kMaxStackMallocSize = 1 << kMaxStackFrameSizeLog;
  static const uptr kNumberOfSizeClasses =
      kMaxStackFrameSizeLog - kMinStackFrameSizeLog + 1;

  bool AddrIsInSizeClass(uptr addr, uptr size_class);

  // Each size class should be large enough to hold all frames.
  uptr ClassMmapSize(uptr size_class);

  uptr ClassSize(uptr size_class) {
    return 1UL << (size_class + kMinStackFrameSizeLog);
  }

  void DeallocateFrame(FakeFrame *fake_frame);

  uptr ComputeSizeClass(uptr alloc_size);
  void AllocateOneSizeClass(uptr size_class);

  uptr stack_size_;
  bool   alive_;

  uptr allocated_size_classes_[kNumberOfSizeClasses];
  FakeFrameFifo size_classes_[kNumberOfSizeClasses];
  FakeFrameLifo call_stack_;
};
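
// Illustrative sketch (assumed shape of the instrumented code; kFrameSize is
// a placeholder): a function's locals are redirected to a fake frame obtained
// from the fake stack.
//
//   void foo() {
//     uptr real_stack = /* current stack pointer */;
//     uptr fake_frame = __asan_stack_malloc(kFrameSize, real_stack);
//     // ... locals live in [fake_frame, fake_frame + kFrameSize) ...
//     __asan_stack_free(fake_frame, kFrameSize, real_stack);
//   }
//
// After foo() returns, the fake frame stays poisoned until it is reused, so a
// dangling pointer into it produces a use-after-return report.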

void *asan_memalign(uptr alignment, uptr size, StackTrace *stack,
                    AllocType alloc_type);
void asan_free(void *ptr, StackTrace *stack, AllocType alloc_type);

void *asan_malloc(uptr size, StackTrace *stack);
void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack);
void *asan_realloc(void *p, uptr size, StackTrace *stack);
void *asan_valloc(uptr size, StackTrace *stack);
void *asan_pvalloc(uptr size, StackTrace *stack);

int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        StackTrace *stack);
uptr asan_malloc_usable_size(void *ptr, StackTrace *stack);

uptr asan_mz_size(const void *ptr);
void asan_mz_force_lock();
void asan_mz_force_unlock();

void PrintInternalAllocatorStats();
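
// Illustrative sketch (assumed usage, in the spirit of the libc interceptors
// elsewhere in the runtime): each interceptor captures a stack trace and
// forwards to the matching asan_* entry point declared above.
//
//   INTERCEPTOR(void*, malloc, uptr size) {
//     GET_STACK_TRACE_MALLOC;
//     return asan_malloc(size, &stack);
//   }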

// Log2 and RoundUpToPowerOfTwo should be inlined for performance.
#if defined(_WIN32) && !defined(__clang__)
extern "C" {
unsigned char _BitScanForward(unsigned long *index, unsigned long mask);  // NOLINT
unsigned char _BitScanReverse(unsigned long *index, unsigned long mask);  // NOLINT
#if defined(_WIN64)
unsigned char _BitScanForward64(unsigned long *index, unsigned __int64 mask);  // NOLINT
unsigned char _BitScanReverse64(unsigned long *index, unsigned __int64 mask);  // NOLINT
#endif
}
#endif

static inline uptr Log2(uptr x) {
  CHECK(IsPowerOfTwo(x));
#if !defined(_WIN32) || defined(__clang__)
  return __builtin_ctzl(x);
#elif defined(_WIN64)
  unsigned long ret;  // NOLINT
  _BitScanForward64(&ret, x);
  return ret;
#else
  unsigned long ret;  // NOLINT
  _BitScanForward(&ret, x);
  return ret;
#endif
}

static inline uptr RoundUpToPowerOfTwo(uptr size) {
  CHECK(size);
  if (IsPowerOfTwo(size)) return size;

  unsigned long up;  // NOLINT
#if !defined(_WIN32) || defined(__clang__)
  up = SANITIZER_WORDSIZE - 1 - __builtin_clzl(size);
#elif defined(_WIN64)
  _BitScanReverse64(&up, size);
#else
  _BitScanReverse(&up, size);
#endif
  CHECK(size < (1ULL << (up + 1)));
  CHECK(size > (1ULL << up));
  return 1UL << (up + 1);
}
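
// Worked examples (follow directly from the code above):
//   Log2(32) == 5                  // 32 == 1 << 5; ctz of a power of two.
//   RoundUpToPowerOfTwo(32) == 32  // Already a power of two.
//   RoundUpToPowerOfTwo(33) == 64  // Highest set bit is bit 5 (up == 5),
//                                  // so the result is 1 << 6 == 64.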


}  // namespace __asan
#endif  // ASAN_ALLOCATOR_H