sanitizer_allocator_test.cc revision 567ad078d73babb2c8addfbebb1ddd6cd0085c53
//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <algorithm>
#include <vector>

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

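// Checks the basic invariants of a size class map: ClassID() and Size() are
// consistent inverses, class sizes grow monotonically, and every size up to
// kMaxSize maps to a class large enough to hold it.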
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

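// Runs several allocate/deallocate rounds over a size class allocator:
// checks block boundaries, ownership, size class lookup and a metadata
// round-trip, and verifies that total memory usage stabilizes after the
// first round (i.e. freed memory is reused).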
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 5; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)2, 1000000 / size);
      for (uptr j = 0; j < n_iter; j++) {
        void *x = a->Allocate(size, 1);
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin((char*)x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr j = 0; j < allocated.size(); j++) {
      void *x = allocated[j];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

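// Stresses metadata access: calls GetMetaData() kNumAllocs^2 times on live
// chunks to make sure repeated lookups do not disturb allocator state.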
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a->Allocate(size, 1);
    allocated[i] = x;
  }
  // Get metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a->Deallocate(allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

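// Counts OnMap()/OnUnmap() events so the tests below can verify exactly how
// many map/unmap operations each allocator performs.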
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // Alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  fprintf(stderr, "Map: %d Unmap: %d\n",
          TestMapUnmapCallback::map_count,
          TestMapUnmapCallback::unmap_count);
}

TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  void *x = a.Allocate(1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}

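// Allocates 1 MB chunks in a loop far beyond what the allocator can satisfy;
// it is expected to die with an "Out of memory" check failure.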
template <class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();

  static const int kNumAllocs = 100;
  void *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = a.Allocate(size, 1);
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all.
  for (int i = 0; i < kNumAllocs; i++) {
    void *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none is left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    void *x = a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    void *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr sz = ((i % 10) + 1) * 4096;
      allocated[i] = a.Allocate(sz, alignment);
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      char *p = (char*)allocated[i];
      p[0] = p[sz - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

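// Drives the combined (primary + secondary) allocator through a local cache:
// impossibly large requests must return 0, and mixed small/large allocations
// must round-trip their metadata across iterations with a randomized free
// order.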
template <class PrimaryAllocator, class SecondaryAllocator,
          class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  cache.Init();

  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

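// Checks that a local cache drains back to the underlying allocator:
// after Drain(), TotalMemoryUsed() must be identical on every iteration.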
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr j = 0; j < kNumAllocs; j++) {
      allocated[j] = cache.Allocate(a, 0);
    }
    for (uptr j = 0; j < kNumAllocs; j++) {
      cache.Deallocate(a, 0, allocated[j]);
    }
    cache.Drain(a);
    uptr total_allocated = a->TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

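// Tests for the internal allocator (InternalAlloc/InternalFree) used by the
// sanitizer runtimes themselves.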
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}