//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <string.h>  // memset() is used below.
#include <algorithm>
#include <vector>

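// Parameters of the allocators under test. On 64-bit targets the primary
// allocator gets a fixed 1 TiB region at 0x700000000000 inside the 47-bit
// address space; on 32-bit targets the whole 4 GiB space is used.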
#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

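// Verifies the basic invariants of a size class map: ClassID() and Size()
// round-trip, class sizes strictly grow, and every size up to kMaxSize maps
// to the smallest class that fits it.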
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

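// Generic workout for a size class allocator: allocates chunks of many
// sizes, checks block boundaries, size classes and per-chunk metadata, then
// frees everything and expects TotalMemoryUsed() to stay flat across rounds.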
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 5; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)2, 1000000 / size);
      for (uptr i = 0; i < n_iter; i++) {
        void *x = a->Allocate(size, 1);
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin((char*)x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr i = 0; i < allocated.size(); i++) {
      void *x = allocated[i];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

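// Stress test for GetMetaData(): resolves the metadata of live chunks
// kNumAllocs^2 times; the volatile sink prevents the lookups from being
// optimized away.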
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a->Allocate(size, 1);
    allocated[i] = x;
  }
  // Get Metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a->Deallocate(allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

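// Keeps allocating 1 MiB chunks until the allocator's fixed region is
// exhausted; the expected outcome is a fatal "Out of memory" check,
// verified below with EXPECT_DEATH.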
template<class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

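// Covers the secondary (mmap-based) allocator directly: plain alloc/free,
// metadata round-trips, reverse-order deallocation, and aligned allocations
// up to 2^28-byte alignment (2^24 on 32-bit targets).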
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator a;
  a.Init();

  static const int kNumAllocs = 100;
  void *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = a.Allocate(size, 1);
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all.
  for (int i = 0; i < kNumAllocs; i++) {
    void *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    void *x = a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    void *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      allocated[i] = a.Allocate(size, alignment);
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      char *p = (char*)allocated[i];
      p[0] = p[size - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

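// Exercises the combined (primary + secondary) allocator through a cache:
// absurd sizes must return null, and a shuffled allocate/free workload must
// round-trip metadata for both small and large chunks.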
template <class PrimaryAllocator, class SecondaryAllocator,
          class AllocatorCache>
void TestCombinedAllocator() {
  CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator> a;
  a.Init();

  AllocatorCache cache;
  cache.Init();

  EXPECT_EQ(a.Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a.Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a.PointerIsMine(x));
      *meta = 0;
      a.Deallocate(&cache, x);
    }
    allocated.clear();
    a.SwallowCache(&cache);
  }
  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}
#endif

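// Checks an allocator-local cache: after each allocate/free round the cache
// is drained back to the allocator and TotalMemoryUsed() must stay constant.
// The static THREADLOCAL instance merely checks that such a cache can be
// declared thread-local and initialized.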
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typename AllocatorCache::Allocator a;

  a.Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr i = 0; i < kNumAllocs; i++) {
      allocated[i] = cache.Allocate(&a, 0);
    }
    for (uptr i = 0; i < kNumAllocs; i++) {
      cache.Deallocate(&a, 0, allocated[i]);
    }
    cache.Drain(&a);
    uptr total_allocated = a.TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}
#endif

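// Smoke test for the internal allocator used by the sanitizer runtime itself.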
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

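// Random-size stress for InternalAlloc()/InternalFree().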
TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

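// InternalScopedBuffer must report its size in bytes (sizeof(T) * count)
// and hand out writable storage; the inner scope also exercises destruction.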
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}
385