// sanitizer_allocator_test.cc, revision bdd844cb41718c27ef727a99a236191bc29a3df8
//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "sanitizer_test_utils.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <pthread.h>
#include <algorithm>
#include <vector>

// Too slow for debug builds.
#if TSAN_DEBUG == 0

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

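// Instantiates the given size class map and runs its built-in
// consistency checks.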
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
  // SCMap::Print();
  SCMap::Validate();
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

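// Generic test for a size-class allocator: allocates chunks of assorted
// sizes through a local cache, verifies block boundaries, ownership, size
// classes, and metadata round-trips, then frees everything and checks that
// total memory usage stays stable across iterations.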
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();
  SizeClassAllocatorLocalCache<Allocator> cache;
  memset(&cache, 0, sizeof(cache));
  cache.Init(0);

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 120000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 3; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)6, 10000000 / size);
      // fprintf(stderr, "size: %ld iter: %ld\n", size, n_iter);
      for (uptr i = 0; i < n_iter; i++) {
        uptr class_id0 = Allocator::SizeClassMapT::ClassID(size);
        char *x = (char*)cache.Allocate(a, class_id0);
        x[0] = 0;
        x[size - 1] = 0;
        x[size / 2] = 0;
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin(x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK(a->PointerIsMine(x + size - 1));
        CHECK(a->PointerIsMine(x + size / 2));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr i = 0; i < allocated.size(); i++) {
      void *x = allocated[i];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      cache.Deallocate(a, a->GetSizeClass(x), x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

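// Stresses GetMetaData: allocates chunks across many size classes, then
// queries their metadata kNumAllocs^2 times.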
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  SizeClassAllocatorLocalCache<Allocator> cache;
  memset(&cache, 0, sizeof(cache));
  cache.Init(0);
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    void *x = cache.Allocate(a, 1 + i % 50);
    allocated[i] = x;
  }
  // Get Metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    cache.Deallocate(a, 1 + i % 50, allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

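// Counts map/unmap events so the tests below can verify how many times an
// allocator maps and unmaps memory.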
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  SizeClassAllocatorLocalCache<Allocator64WithCallBack> cache;
  memset(&cache, 0, sizeof(cache));
  cache.Init(0);
  AllocatorStats stats;
  stats.Init();
  a->AllocateBatch(&stats, &cache, 64);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  SizeClassAllocatorLocalCache<Allocator32WithCallBack> cache;
  memset(&cache, 0, sizeof(cache));
  cache.Init(0);
  AllocatorStats stats;
  stats.Init();
  a->AllocateBatch(&stats, &cache, 64);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // State + alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  // fprintf(stderr, "Map: %d Unmap: %d\n",
  //         TestMapUnmapCallback::map_count,
  //         TestMapUnmapCallback::unmap_count);
}

TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  AllocatorStats stats;
  stats.Init();
  void *x = a.Allocate(&stats, 1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(&stats, x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}

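// Allocates batches until the allocator's fixed-size address range is
// exhausted; callers wrap this in EXPECT_DEATH and expect an
// "Out of memory" message.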
template<class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  SizeClassAllocatorLocalCache<Allocator> cache;
  memset(&cache, 0, sizeof(cache));
  cache.Init(0);
  AllocatorStats stats;
  stats.Init();
  for (int i = 0; i < 1000000; i++) {
    a.AllocateBatch(&stats, &cache, 64);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

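// End-to-end test for the secondary allocator: plain and metadata-carrying
// allocations, reverse-order deallocation, and aligned allocations up to
// large power-of-two alignments.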
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();
  AllocatorStats stats;
  stats.Init();

  static const int kNumAllocs = 1000;
  char *allocated[kNumAllocs];
  static const uptr size = 4000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = (char *)a.Allocate(&stats, size, 1);
    CHECK(a.PointerIsMine(allocated[i]));
  }
  // Deallocate all.
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  for (int i = 0; i < kNumAllocs; i++) {
    char *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(&stats, p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    char *x = (char *)a.Allocate(&stats, size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  for (int i = 0; i < kNumAllocs * kNumAllocs; i++) {
    char *p = allocated[i % kNumAllocs];
    CHECK(a.PointerIsMine(p));
    CHECK(a.PointerIsMine(p + 2000));
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    char *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(&stats, p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Test alignments.
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    const uptr kNumAlignedAllocs = 100;
    for (uptr i = 0; i < kNumAlignedAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      char *p = allocated[i] = (char *)a.Allocate(&stats, size, alignment);
      CHECK_EQ(p, a.GetBlockBegin(p));
      CHECK_EQ(p, a.GetBlockBegin(p + size - 1));
      CHECK_EQ(p, a.GetBlockBegin(p + size / 2));
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      p[0] = p[size - 1] = 0;
    }
    for (uptr i = 0; i < kNumAlignedAllocs; i++) {
      a.Deallocate(&stats, allocated[i]);
    }
  }
}

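// Exercises the combined (primary + secondary) allocator: impossible sizes
// must fail cleanly, then many randomized allocations are checked via
// metadata, freed in shuffled order, and the cache is swallowed each round.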
template
<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  memset(&cache, 0, sizeof(cache));
  a->InitCache(&cache);

  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->DestroyCache(&cache);
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

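// Checks that a local cache returns memory to its allocator on Drain():
// total memory usage must not grow across repeated
// allocate/deallocate/drain cycles for each size class.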
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  memset(&cache, 0, sizeof(cache));
  cache.Init(0);

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int class_id = 1; class_id <= 5; class_id++) {
    for (int it = 0; it < kNumIter; it++) {
      void *allocated[kNumAllocs];
      for (uptr i = 0; i < kNumAllocs; i++) {
        allocated[i] = cache.Allocate(a, class_id);
      }
      for (uptr i = 0; i < kNumAllocs; i++) {
        cache.Deallocate(a, class_id, allocated[i]);
      }
      cache.Drain(a);
      uptr total_allocated = a->TotalMemoryUsed();
      if (it)
        CHECK_EQ(saved_total, total_allocated);
      saved_total = total_allocated;
    }
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

#if SANITIZER_WORDSIZE == 64
typedef SizeClassAllocatorLocalCache<Allocator64> AllocatorCache;
static AllocatorCache static_allocator_cache;

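// Worker for AllocatorLeakTest: each short-lived thread allocates once
// through the shared static cache and drains it before exiting.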
void *AllocatorLeakTestWorker(void *arg) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator *a = (Allocator*)(arg);
  static_allocator_cache.Allocate(a, 10);
  static_allocator_cache.Drain(a);
  return 0;
}

TEST(SanitizerCommon, AllocatorLeakTest) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator a;
  a.Init();
  uptr total_used_memory = 0;
  for (int i = 0; i < 100; i++) {
    pthread_t t;
    EXPECT_EQ(0, pthread_create(&t, 0, AllocatorLeakTestWorker, &a));
    EXPECT_EQ(0, pthread_join(t, 0));
    if (i == 0)
      total_used_memory = a.TotalMemoryUsed();
    EXPECT_EQ(a.TotalMemoryUsed(), total_used_memory);
  }

  a.TestOnlyUnmap();
}
#endif

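// Basic sanity checks for InternalAlloc/InternalFree: allocations succeed
// and yield distinct pointers.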
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

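// Stress test: many random-sized internal allocations, then free them all.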
TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = my_rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

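// InternalScopedBuffer must report its size in bytes and hand out
// readable/writable storage.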
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}

#endif  // #if TSAN_DEBUG==0