asan_allocator.h revision d618867d19e4d278e8040e1529f50287c6a4eea5
//===-- asan_allocator.h ----------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// ASan-private header for asan_allocator.cc.
//===----------------------------------------------------------------------===//

#ifndef ASAN_ALLOCATOR_H
#define ASAN_ALLOCATOR_H

#include "asan_internal.h"
#include "asan_interceptors.h"
#include "sanitizer_common/sanitizer_list.h"

// We are in the process of transitioning from the old allocator (version 1)
// to a new one (version 2). The change is quite intrusive so both allocators
// will co-exist in the source base for a while. The actual allocator is chosen
// at build time by redefining this macro.
#define ASAN_ALLOCATOR_VERSION 2

namespace __asan {

// Records which allocation API produced a heap block, so mismatched
// allocation/deallocation pairs (e.g. malloc + operator delete) can be
// detected and reported.
enum AllocType {
  FROM_MALLOC = 1,  // Memory block came from malloc, calloc, realloc, etc.
  FROM_NEW = 2,     // Memory block came from operator new.
  FROM_NEW_BR = 3   // Memory block came from operator new [ ]
};

static const uptr kNumberOfSizeClasses = 255;
struct AsanChunk;

// Lightweight, possibly-invalid view of an allocator chunk. Wraps a raw
// AsanChunk pointer (which may be null, see IsValid()) and exposes its
// metadata without leaking the chunk layout to callers.
class AsanChunkView {
 public:
  explicit AsanChunkView(AsanChunk *chunk) : chunk_(chunk) {}
  bool IsValid() { return chunk_ != 0; }
  uptr Beg();       // first byte of user memory.
  uptr End();       // last byte of user memory.
  uptr UsedSize();  // size requested by the user.
  uptr AllocTid();
  uptr FreeTid();
  void GetAllocStack(StackTrace *stack);
  void GetFreeStack(StackTrace *stack);
  // True if the access [addr, addr + access_size) lies inside the user
  // region; *offset is then the distance of addr from Beg().
  bool AddrIsInside(uptr addr, uptr access_size, uptr *offset) {
    if (addr >= Beg() && (addr + access_size) <= End()) {
      *offset = addr - Beg();
      return true;
    }
    return false;
  }
  // True if addr is to the left of the user region; *offset is the distance
  // from addr up to Beg(). access_size is unused here.
  bool AddrIsAtLeft(uptr addr, uptr access_size, uptr *offset) {
    (void)access_size;
    if (addr < Beg()) {
      *offset = Beg() - addr;
      return true;
    }
    return false;
  }
  // True if the access reaches past End(); *offset is the distance of addr
  // beyond End(), or 0 when the access merely straddles the right edge.
  bool AddrIsAtRight(uptr addr, uptr access_size, uptr *offset) {
    if (addr + access_size >= End()) {
      if (addr <= End())
        *offset = 0;
      else
        *offset = addr - End();
      return true;
    }
    return false;
  }

 private:
  AsanChunk *const chunk_;
};

// Returns a view of the chunk containing |address| (invalid view if none).
AsanChunkView FindHeapChunkByAddress(uptr address);

// List of AsanChunks with total size.
class AsanChunkFifoList: public IntrusiveList<AsanChunk> {
 public:
  explicit AsanChunkFifoList(LinkerInitialized) { }
  AsanChunkFifoList() { clear(); }
  void Push(AsanChunk *n);
  void PushList(AsanChunkFifoList *q);
  AsanChunk *Pop();
  uptr size() { return size_; }
  void clear() {
    IntrusiveList<AsanChunk>::clear();
    size_ = 0;
  }
 private:
  uptr size_;  // Total byte size of the listed chunks.
};

// Per-thread allocator state. The layout depends on which allocator
// version is compiled in (see ASAN_ALLOCATOR_VERSION above).
struct AsanThreadLocalMallocStorage {
  explicit AsanThreadLocalMallocStorage(LinkerInitialized x)
#if ASAN_ALLOCATOR_VERSION == 1
      : quarantine_(x)
#endif
      { }
  AsanThreadLocalMallocStorage() {
    // memset must already be intercepted before any thread storage is
    // default-constructed.
    CHECK(REAL(memset));
    REAL(memset)(this, 0, sizeof(AsanThreadLocalMallocStorage));
  }

#if ASAN_ALLOCATOR_VERSION == 1
  AsanChunkFifoList quarantine_;
  AsanChunk *free_lists_[kNumberOfSizeClasses];
#else
  uptr quarantine_cache[16];
  uptr allocator2_cache[1024];  // Opaque.
#endif
  // Flushes this thread-local state back to the global allocator
  // (e.g. on thread exit).
  void CommitBack();
};

// Fake stack frame contains local variables of one function.
// This struct should fit into a stack redzone (32 bytes).
122struct FakeFrame { 123 uptr magic; // Modified by the instrumented code. 124 uptr descr; // Modified by the instrumented code. 125 FakeFrame *next; 126 u64 real_stack : 48; 127 u64 size_minus_one : 16; 128}; 129 130struct FakeFrameFifo { 131 public: 132 void FifoPush(FakeFrame *node); 133 FakeFrame *FifoPop(); 134 private: 135 FakeFrame *first_, *last_; 136}; 137 138class FakeFrameLifo { 139 public: 140 void LifoPush(FakeFrame *node) { 141 node->next = top_; 142 top_ = node; 143 } 144 void LifoPop() { 145 CHECK(top_); 146 top_ = top_->next; 147 } 148 FakeFrame *top() { return top_; } 149 private: 150 FakeFrame *top_; 151}; 152 153// For each thread we create a fake stack and place stack objects on this fake 154// stack instead of the real stack. The fake stack is not really a stack but 155// a fast malloc-like allocator so that when a function exits the fake stack 156// is not poped but remains there for quite some time until gets used again. 157// So, we poison the objects on the fake stack when function returns. 158// It helps us find use-after-return bugs. 159// We can not rely on __asan_stack_free being called on every function exit, 160// so we maintain a lifo list of all current fake frames and update it on every 161// call to __asan_stack_malloc. 162class FakeStack { 163 public: 164 FakeStack(); 165 explicit FakeStack(LinkerInitialized) {} 166 void Init(uptr stack_size); 167 void StopUsingFakeStack() { alive_ = false; } 168 void Cleanup(); 169 uptr AllocateStack(uptr size, uptr real_stack); 170 static void OnFree(uptr ptr, uptr size, uptr real_stack); 171 // Return the bottom of the maped region. 172 uptr AddrIsInFakeStack(uptr addr); 173 bool StackSize() { return stack_size_; } 174 175 private: 176 static const uptr kMinStackFrameSizeLog = 9; // Min frame is 512B. 177 static const uptr kMaxStackFrameSizeLog = 16; // Max stack frame is 64K. 
178 static const uptr kMaxStackMallocSize = 1 << kMaxStackFrameSizeLog; 179 static const uptr kNumberOfSizeClasses = 180 kMaxStackFrameSizeLog - kMinStackFrameSizeLog + 1; 181 182 bool AddrIsInSizeClass(uptr addr, uptr size_class); 183 184 // Each size class should be large enough to hold all frames. 185 uptr ClassMmapSize(uptr size_class); 186 187 uptr ClassSize(uptr size_class) { 188 return 1UL << (size_class + kMinStackFrameSizeLog); 189 } 190 191 void DeallocateFrame(FakeFrame *fake_frame); 192 193 uptr ComputeSizeClass(uptr alloc_size); 194 void AllocateOneSizeClass(uptr size_class); 195 196 uptr stack_size_; 197 bool alive_; 198 199 uptr allocated_size_classes_[kNumberOfSizeClasses]; 200 FakeFrameFifo size_classes_[kNumberOfSizeClasses]; 201 FakeFrameLifo call_stack_; 202}; 203 204void *asan_memalign(uptr alignment, uptr size, StackTrace *stack, 205 AllocType alloc_type); 206void asan_free(void *ptr, StackTrace *stack, AllocType alloc_type); 207 208void *asan_malloc(uptr size, StackTrace *stack); 209void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack); 210void *asan_realloc(void *p, uptr size, StackTrace *stack); 211void *asan_valloc(uptr size, StackTrace *stack); 212void *asan_pvalloc(uptr size, StackTrace *stack); 213 214int asan_posix_memalign(void **memptr, uptr alignment, uptr size, 215 StackTrace *stack); 216uptr asan_malloc_usable_size(void *ptr, StackTrace *stack); 217 218uptr asan_mz_size(const void *ptr); 219void asan_mz_force_lock(); 220void asan_mz_force_unlock(); 221 222void PrintInternalAllocatorStats(); 223 224// Log2 and RoundUpToPowerOfTwo should be inlined for performance. 
#if defined(_WIN32) && !defined(__clang__)
// MSVC lacks __builtin_ctzl/__builtin_clzl; declare the BitScan intrinsics
// instead (clang-cl provides the builtins, hence the !__clang__ guard).
extern "C" {
unsigned char _BitScanForward(unsigned long *index, unsigned long mask);  // NOLINT
unsigned char _BitScanReverse(unsigned long *index, unsigned long mask);  // NOLINT
#if defined(_WIN64)
unsigned char _BitScanForward64(unsigned long *index, unsigned __int64 mask);  // NOLINT
unsigned char _BitScanReverse64(unsigned long *index, unsigned __int64 mask);  // NOLINT
#endif
}
#endif

// Returns log2(x). x must be an exact power of two (CHECKed), so the index
// of its single set bit is the answer; counting trailing zeros finds it.
static inline uptr Log2(uptr x) {
  CHECK(IsPowerOfTwo(x));
#if !defined(_WIN32) || defined(__clang__)
  return __builtin_ctzl(x);
#elif defined(_WIN64)
  unsigned long ret;  // NOLINT
  _BitScanForward64(&ret, x);
  return ret;
#else
  unsigned long ret;  // NOLINT
  _BitScanForward(&ret, x);
  return ret;
#endif
}

// Rounds size up to the nearest power of two; returns size unchanged if it
// already is one. size must be non-zero (CHECKed).
// NOTE(review): if the most significant bit of size is set, 'up + 1' equals
// the word width and both the shift and the CHECK below would be undefined
// behavior -- presumably such sizes never reach this path; confirm at call
// sites.
static inline uptr RoundUpToPowerOfTwo(uptr size) {
  CHECK(size);
  if (IsPowerOfTwo(size)) return size;

  unsigned long up;  // NOLINT
#if !defined(_WIN32) || defined(__clang__)
  // up = bit index of the most significant set bit of size.
  up = SANITIZER_WORDSIZE - 1 - __builtin_clzl(size);
#elif defined(_WIN64)
  _BitScanReverse64(&up, size);
#else
  _BitScanReverse(&up, size);
#endif
  // Sanity: 2^up < size < 2^(up+1), strict on both sides since size is not
  // a power of two (that case returned above).
  CHECK(size < (1ULL << (up + 1)));
  CHECK(size > (1ULL << up));
  return 1UL << (up + 1);
}


}  // namespace __asan
#endif  // ASAN_ALLOCATOR_H