asan_allocator.h revision 6a11cc1bc665f13a0fcafe4a6a84761216675af7
//===-- asan_allocator.h ----------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// ASan-private header for asan_allocator.cc.
//===----------------------------------------------------------------------===//

#ifndef ASAN_ALLOCATOR_H
#define ASAN_ALLOCATOR_H

#include "asan_internal.h"
#include "asan_interceptors.h"
#include "sanitizer_common/sanitizer_list.h"

// We are in the process of transitioning from the old allocator (version 1)
// to a new one (version 2). The change is quite intrusive so both allocators
// will co-exist in the source base for a while. The actual allocator is chosen
// at build time by redefining this macro.
#ifndef ASAN_ALLOCATOR_VERSION
# if (ASAN_LINUX && !ASAN_ANDROID) || ASAN_MAC
#  define ASAN_ALLOCATOR_VERSION 2
# else
#  define ASAN_ALLOCATOR_VERSION 1
# endif
#endif  // ASAN_ALLOCATOR_VERSION
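// Illustrative build-time override (a sketch, not an officially documented
// switch): because of the #ifndef guard above, defining the macro on the
// compiler command line selects the allocator explicitly, e.g.
//   clang++ -DASAN_ALLOCATOR_VERSION=1 ... asan_allocator.cc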

namespace __asan {

enum AllocType {
  FROM_MALLOC = 1,  // Memory block came from malloc, calloc, realloc, etc.
  FROM_NEW = 2,     // Memory block came from operator new.
  FROM_NEW_BR = 3   // Memory block came from operator new [].
};

static const uptr kNumberOfSizeClasses = 255;
struct AsanChunk;

void InitializeAllocator();

class AsanChunkView {
 public:
  explicit AsanChunkView(AsanChunk *chunk) : chunk_(chunk) {}
  bool IsValid() { return chunk_ != 0; }
  uptr Beg();       // first byte of user memory.
  uptr End();       // last byte of user memory.
  uptr UsedSize();  // size requested by the user.
  uptr AllocTid();
  uptr FreeTid();
  void GetAllocStack(StackTrace *stack);
  void GetFreeStack(StackTrace *stack);
  bool AddrIsInside(uptr addr, uptr access_size, sptr *offset) {
    if (addr >= Beg() && (addr + access_size) <= End()) {
      *offset = addr - Beg();
      return true;
    }
    return false;
  }
  bool AddrIsAtLeft(uptr addr, uptr access_size, sptr *offset) {
    (void)access_size;
    if (addr < Beg()) {
      *offset = Beg() - addr;
      return true;
    }
    return false;
  }
  bool AddrIsAtRight(uptr addr, uptr access_size, sptr *offset) {
    if (addr + access_size >= End()) {
      *offset = addr - End();
      return true;
    }
    return false;
  }

 private:
  AsanChunk *const chunk_;
};

AsanChunkView FindHeapChunkByAddress(uptr address);
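// Illustrative sketch (not part of this header): error reporting code can
// combine FindHeapChunkByAddress() with the Addr* queries above to describe
// where a bad access landed relative to a heap chunk:
//   AsanChunkView chunk = FindHeapChunkByAddress(addr);
//   sptr offset = 0;
//   if (chunk.IsValid() && chunk.AddrIsInside(addr, access_size, &offset)) {
//     // 'addr' is 'offset' bytes past the start of the user region.
//   } else if (chunk.IsValid() &&
//              chunk.AddrIsAtLeft(addr, access_size, &offset)) {
//     // 'addr' underflows the user region by 'offset' bytes.
//   }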

// List of AsanChunks with total size.
class AsanChunkFifoList: public IntrusiveList<AsanChunk> {
 public:
  explicit AsanChunkFifoList(LinkerInitialized) { }
  AsanChunkFifoList() { clear(); }
  void Push(AsanChunk *n);
  void PushList(AsanChunkFifoList *q);
  AsanChunk *Pop();
  uptr size() { return size_; }
  void clear() {
    IntrusiveList<AsanChunk>::clear();
    size_ = 0;
  }
 private:
  uptr size_;
};
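// Illustrative sketch (not part of this header, all names hypothetical): the
// allocator can use this type as a FIFO quarantine, e.g. pushing freed chunks
// into a thread-local list and draining it into a global one when it grows
// too large:
//   thread_quarantine.Push(chunk);
//   if (thread_quarantine.size() > kThreadQuarantineMaxSize)
//     global_quarantine.PushList(&thread_quarantine);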

struct AsanThreadLocalMallocStorage {
  explicit AsanThreadLocalMallocStorage(LinkerInitialized x)
#if ASAN_ALLOCATOR_VERSION == 1
      : quarantine_(x)
#endif
      { }
  AsanThreadLocalMallocStorage() {
    CHECK(REAL(memset));
    REAL(memset)(this, 0, sizeof(AsanThreadLocalMallocStorage));
  }

#if ASAN_ALLOCATOR_VERSION == 1
  AsanChunkFifoList quarantine_;
  AsanChunk *free_lists_[kNumberOfSizeClasses];
#else
  uptr quarantine_cache[16];
  uptr allocator2_cache[96 * (512 * 8 + 16)];  // Opaque.
#endif
  void CommitBack();
};

// Fake stack frame contains local variables of one function.
// This struct should fit into a stack redzone (32 bytes).
struct FakeFrame {
  uptr magic;  // Modified by the instrumented code.
  uptr descr;  // Modified by the instrumented code.
  FakeFrame *next;
  u64 real_stack     : 48;
  u64 size_minus_one : 16;
};
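// On a 64-bit target the layout above is 8 (magic) + 8 (descr) + 8 (next) +
// 8 (real_stack and size_minus_one packed into one u64) = 32 bytes, which is
// how FakeFrame fits the 32-byte redzone mentioned above. A compile-time check
// along these lines would make the assumption explicit (sketch only, not
// present in the original):
//   COMPILER_CHECK(sizeof(FakeFrame) <= 32);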

struct FakeFrameFifo {
 public:
  void FifoPush(FakeFrame *node);
  FakeFrame *FifoPop();
 private:
  FakeFrame *first_, *last_;
};

class FakeFrameLifo {
 public:
  void LifoPush(FakeFrame *node) {
    node->next = top_;
    top_ = node;
  }
  void LifoPop() {
    CHECK(top_);
    top_ = top_->next;
  }
  FakeFrame *top() { return top_; }
 private:
  FakeFrame *top_;
};

// For each thread we create a fake stack and place stack objects on this fake
// stack instead of the real stack. The fake stack is not really a stack but
// a fast malloc-like allocator so that when a function exits the fake stack
// is not popped but remains there for quite some time until it gets used
// again. So, we poison the objects on the fake stack when the function
// returns.
// It helps us find use-after-return bugs.
// We cannot rely on __asan_stack_free being called on every function exit,
// so we maintain a lifo list of all current fake frames and update it on every
// call to __asan_stack_malloc.
class FakeStack {
 public:
  FakeStack();
  explicit FakeStack(LinkerInitialized) {}
  void Init(uptr stack_size);
  void StopUsingFakeStack() { alive_ = false; }
  void Cleanup();
  uptr AllocateStack(uptr size, uptr real_stack);
  static void OnFree(uptr ptr, uptr size, uptr real_stack);
  // Return the bottom of the mapped region.
  uptr AddrIsInFakeStack(uptr addr);
  uptr StackSize() { return stack_size_; }

 private:
  static const uptr kMinStackFrameSizeLog = 9;  // Min frame is 512B.
  static const uptr kMaxStackFrameSizeLog = 16;  // Max stack frame is 64K.
  static const uptr kMaxStackMallocSize = 1 << kMaxStackFrameSizeLog;
  static const uptr kNumberOfSizeClasses =
      kMaxStackFrameSizeLog - kMinStackFrameSizeLog + 1;

  bool AddrIsInSizeClass(uptr addr, uptr size_class);

  // Each size class should be large enough to hold all frames.
  uptr ClassMmapSize(uptr size_class);

  uptr ClassSize(uptr size_class) {
    return 1UL << (size_class + kMinStackFrameSizeLog);
  }

  void DeallocateFrame(FakeFrame *fake_frame);

  uptr ComputeSizeClass(uptr alloc_size);
  void AllocateOneSizeClass(uptr size_class);

  uptr stack_size_;
  bool alive_;

  uptr allocated_size_classes_[kNumberOfSizeClasses];
  FakeFrameFifo size_classes_[kNumberOfSizeClasses];
  FakeFrameLifo call_stack_;
};
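// Illustrative sketch (not part of this header): per the comment above, code
// instrumented for use-after-return detection conceptually does the following
// with the current thread's FakeStack ('fake_stack', 'frame_size' and
// 'real_frame' are hypothetical locals):
//   uptr fake_frame = fake_stack.AllocateStack(frame_size, real_frame);
//   // ... the function keeps its locals in
//   //     [fake_frame, fake_frame + frame_size) ...
//   FakeStack::OnFree(fake_frame, frame_size, real_frame);  // Poison on exit.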

void *asan_memalign(uptr alignment, uptr size, StackTrace *stack,
                    AllocType alloc_type);
void asan_free(void *ptr, StackTrace *stack, AllocType alloc_type);

void *asan_malloc(uptr size, StackTrace *stack);
void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack);
void *asan_realloc(void *p, uptr size, StackTrace *stack);
void *asan_valloc(uptr size, StackTrace *stack);
void *asan_pvalloc(uptr size, StackTrace *stack);

int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
                        StackTrace *stack);
uptr asan_malloc_usable_size(void *ptr, StackTrace *stack);
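// Illustrative sketch (not part of this header): the malloc interceptors are
// thin wrappers that capture a stack trace and forward to the functions above,
// roughly like this (macro and interceptor spellings may differ slightly
// between platforms):
//   INTERCEPTOR(void*, malloc, uptr size) {
//     GET_STACK_TRACE_MALLOC;  // Declares 'stack'.
//     return asan_malloc(size, &stack);
//   }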

uptr asan_mz_size(const void *ptr);
void asan_mz_force_lock();
void asan_mz_force_unlock();

void PrintInternalAllocatorStats();

// Log2 and RoundUpToPowerOfTwo should be inlined for performance.
#if defined(_WIN32) && !defined(__clang__)
extern "C" {
unsigned char _BitScanForward(unsigned long *index, unsigned long mask);  // NOLINT
unsigned char _BitScanReverse(unsigned long *index, unsigned long mask);  // NOLINT
#if defined(_WIN64)
unsigned char _BitScanForward64(unsigned long *index, unsigned __int64 mask);  // NOLINT
unsigned char _BitScanReverse64(unsigned long *index, unsigned __int64 mask);  // NOLINT
#endif
}
#endif

static inline uptr Log2(uptr x) {
  CHECK(IsPowerOfTwo(x));
#if !defined(_WIN32) || defined(__clang__)
  return __builtin_ctzl(x);
#elif defined(_WIN64)
  unsigned long ret;  // NOLINT
  _BitScanForward64(&ret, x);
  return ret;
#else
  unsigned long ret;  // NOLINT
  _BitScanForward(&ret, x);
  return ret;
#endif
}

static inline uptr RoundUpToPowerOfTwo(uptr size) {
  CHECK(size);
  if (IsPowerOfTwo(size)) return size;

  unsigned long up;  // NOLINT
#if !defined(_WIN32) || defined(__clang__)
  up = SANITIZER_WORDSIZE - 1 - __builtin_clzl(size);
#elif defined(_WIN64)
  _BitScanReverse64(&up, size);
#else
  _BitScanReverse(&up, size);
#endif
  CHECK(size < (1ULL << (up + 1)));
  CHECK(size > (1ULL << up));
  return 1UL << (up + 1);
}
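// Worked examples for the two helpers above:
//   Log2(1) == 0, Log2(16) == 4, Log2(4096) == 12 (the argument must be a
//   power of two, otherwise the CHECK fires);
//   RoundUpToPowerOfTwo(1) == 1, RoundUpToPowerOfTwo(17) == 32,
//   RoundUpToPowerOfTwo(4096) == 4096.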


}  // namespace __asan
#endif  // ASAN_ALLOCATOR_H