1//===-- asan_allocator.h ----------------------------------------*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of AddressSanitizer, an address sanity checker.
11//
12// ASan-private header for asan_allocator2.cc.
13//===----------------------------------------------------------------------===//
14
15#ifndef ASAN_ALLOCATOR_H
16#define ASAN_ALLOCATOR_H
17
18#include "asan_internal.h"
19#include "asan_interceptors.h"
20#include "sanitizer_common/sanitizer_allocator.h"
21#include "sanitizer_common/sanitizer_list.h"
22
23namespace __asan {
24
// Records which allocation API produced a memory block, so the matching
// deallocation path can detect alloc/dealloc mismatches.
enum AllocType {
  FROM_MALLOC = 1,  // malloc, calloc, realloc and friends.
  FROM_NEW = 2,     // operator new.
  FROM_NEW_BR = 3   // operator new[].
};
30
// NOTE(review): 255 matches the number of size classes exposed through the
// malloc hooks interface — confirm against the size class map in use.
static const uptr kNumberOfSizeClasses = 255;
struct AsanChunk;  // Opaque chunk header; layout lives in asan_allocator2.cc.

// One-time allocator setup, called during ASan init.
void InitializeAllocator();
// NOTE(review): presumably re-applies runtime flags to the allocator after
// they change (e.g. post-fork or __asan_set_... calls) — confirm in the .cc.
void ReInitializeAllocator();
36
37class AsanChunkView {
38 public:
39  explicit AsanChunkView(AsanChunk *chunk) : chunk_(chunk) {}
40  bool IsValid();   // Checks if AsanChunkView points to a valid allocated
41                    // or quarantined chunk.
42  uptr Beg();       // First byte of user memory.
43  uptr End();       // Last byte of user memory.
44  uptr UsedSize();  // Size requested by the user.
45  uptr AllocTid();
46  uptr FreeTid();
47  bool Eq(const AsanChunkView &c) const { return chunk_ == c.chunk_; }
48  void GetAllocStack(StackTrace *stack);
49  void GetFreeStack(StackTrace *stack);
50  bool AddrIsInside(uptr addr, uptr access_size, sptr *offset) {
51    if (addr >= Beg() && (addr + access_size) <= End()) {
52      *offset = addr - Beg();
53      return true;
54    }
55    return false;
56  }
57  bool AddrIsAtLeft(uptr addr, uptr access_size, sptr *offset) {
58    (void)access_size;
59    if (addr < Beg()) {
60      *offset = Beg() - addr;
61      return true;
62    }
63    return false;
64  }
65  bool AddrIsAtRight(uptr addr, uptr access_size, sptr *offset) {
66    if (addr + access_size > End()) {
67      *offset = addr - End();
68      return true;
69    }
70    return false;
71  }
72
73 private:
74  AsanChunk *const chunk_;
75};
76
// Returns a view of the heap chunk containing |address|; use
// AsanChunkView::IsValid() on the result before relying on it.
AsanChunkView FindHeapChunkByAddress(uptr address);
78
79// List of AsanChunks with total size.
80class AsanChunkFifoList: public IntrusiveList<AsanChunk> {
81 public:
82  explicit AsanChunkFifoList(LinkerInitialized) { }
83  AsanChunkFifoList() { clear(); }
84  void Push(AsanChunk *n);
85  void PushList(AsanChunkFifoList *q);
86  AsanChunk *Pop();
87  uptr size() { return size_; }
88  void clear() {
89    IntrusiveList<AsanChunk>::clear();
90    size_ = 0;
91  }
92 private:
93  uptr size_;
94};
95
// Callbacks the sanitizer allocator invokes whenever it maps or unmaps a
// memory range. NOTE(review): presumably used to poison/unpoison shadow for
// the range and update stats — confirm definitions in asan_allocator2.cc.
struct AsanMapUnmapCallback {
  void OnMap(uptr p, uptr size) const;
  void OnUnmap(uptr p, uptr size) const;
};
100
// Primary allocator selection: a 64-bit region-based allocator where the
// address space allows it, otherwise the 32-bit fallback.
#if SANITIZER_CAN_USE_ALLOCATOR64
# if defined(__powerpc64__)
const uptr kAllocatorSpace =  0xa0000000000ULL;
const uptr kAllocatorSize  =  0x20000000000ULL;  // 2T.
# else
const uptr kAllocatorSpace = 0x600000000000ULL;
const uptr kAllocatorSize  =  0x40000000000ULL;  // 4T.
# endif
typedef DefaultSizeClassMap SizeClassMap;
// 0 /*metadata*/: no per-chunk metadata is requested from the allocator.
typedef SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 0 /*metadata*/,
    SizeClassMap, AsanMapUnmapCallback> PrimaryAllocator;
#else  // Fallback to SizeClassAllocator32.
static const uptr kRegionSizeLog = 20;  // 2^20-byte (1 MiB) regions.
static const uptr kNumRegions = SANITIZER_MMAP_RANGE_SIZE >> kRegionSizeLog;
# if SANITIZER_WORDSIZE == 32
typedef FlatByteMap<kNumRegions> ByteMap;
# elif SANITIZER_WORDSIZE == 64
typedef TwoLevelByteMap<(kNumRegions >> 12), 1 << 12> ByteMap;
# endif
typedef CompactSizeClassMap SizeClassMap;
// NOTE(review): the literal 16 is the per-chunk metadata size parameter of
// SizeClassAllocator32 — confirm against sanitizer_allocator.h.
typedef SizeClassAllocator32<0, SANITIZER_MMAP_RANGE_SIZE, 16,
  SizeClassMap, kRegionSizeLog,
  ByteMap,
  AsanMapUnmapCallback> PrimaryAllocator;
#endif  // SANITIZER_CAN_USE_ALLOCATOR64

// Per-thread front-end cache for the primary allocator; allocations the
// primary cannot serve go to the mmap-based secondary via CombinedAllocator.
typedef SizeClassAllocatorLocalCache<PrimaryAllocator> AllocatorCache;
typedef LargeMmapAllocator<AsanMapUnmapCallback> SecondaryAllocator;
typedef CombinedAllocator<PrimaryAllocator, AllocatorCache,
    SecondaryAllocator> Allocator;
131
132
// Per-thread allocator state. NOTE(review): presumably embedded in the
// AsanThread object — confirm at the use sites.
struct AsanThreadLocalMallocStorage {
  // Opaque storage for the thread's quarantine cache; NOTE(review): the
  // 16-uptr size must fit the actual cache type used in asan_allocator2.cc —
  // confirm there.
  uptr quarantine_cache[16];
  AllocatorCache allocator2_cache;
  // Returns this thread's cached resources to the global allocator state.
  void CommitBack();
 private:
  // These objects are allocated via mmap() and are zero-initialized.
  AsanThreadLocalMallocStorage() {}
};
141
// Allocation/deallocation entry points called from the interceptors.
// |stack| is the unwound call stack recorded with the chunk; |alloc_type|
// lets the free path detect alloc/dealloc mismatches (see AllocType).
void *asan_memalign(uptr alignment, uptr size, StackTrace *stack,
                    AllocType alloc_type);
void asan_free(void *ptr, StackTrace *stack, AllocType alloc_type);

void *asan_malloc(uptr size, StackTrace *stack);
void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack);
void *asan_realloc(void *p, uptr size, StackTrace *stack);
void *asan_valloc(uptr size, StackTrace *stack);
void *asan_pvalloc(uptr size, StackTrace *stack);

int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
                          StackTrace *stack);
uptr asan_malloc_usable_size(void *ptr, uptr pc, uptr bp);

// "mz" entry points: NOTE(review): presumably back the Mac malloc-zone
// implementation — confirm in the Mac-specific interceptor code.
uptr asan_mz_size(const void *ptr);
void asan_mz_force_lock();
void asan_mz_force_unlock();

void PrintInternalAllocatorStats();
161
162}  // namespace __asan
163#endif  // ASAN_ALLOCATOR_H
164