//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <pthread.h>
#include <algorithm>
#include <vector>

// Too slow for debug build
#if TSAN_DEBUG == 0

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

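// SizeClassAllocator64 parameters: start of the reserved region, region size,
// bytes of per-chunk metadata, and the size class map.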
typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

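// Checks SizeClassMap invariants: ClassID and Size are mutually consistent,
// class sizes strictly increase, and every request up to kMaxSize maps to the
// smallest class that fits it.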
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

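// Exercises a size class allocator end to end: allocates chunks of many
// sizes, verifies block boundaries, ownership, size classes and metadata,
// then frees everything and checks that TotalMemoryUsed() is stable across
// rounds.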
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 120000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 3; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)6, 10000000 / size);
      // fprintf(stderr, "size: %ld iter: %ld\n", size, n_iter);
      for (uptr i = 0; i < n_iter; i++) {
        char *x = (char*)a->Allocate(size, 1);
        x[0] = 0;
        x[size - 1] = 0;
        x[size / 2] = 0;
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin(x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK(a->PointerIsMine(x + size - 1));
        CHECK(a->PointerIsMine(x + size / 2));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr i = 0; i < allocated.size(); i++) {
      void *x = allocated[i];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

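// Stresses GetMetaData by calling it kNumAllocs^2 times on live chunks.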
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a->Allocate(size, 1);
    allocated[i] = x;
  }
  // Get Metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a->Deallocate(allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

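// Counts OnMap/OnUnmap calls so the tests below can verify exactly when the
// allocators map and unmap memory.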
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // Alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  // fprintf(stderr, "Map: %d Unmap: %d\n",
  //         TestMapUnmapCallback::map_count,
  //         TestMapUnmapCallback::unmap_count);
}

TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  void *x = a.Allocate(1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}

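// Keeps allocating 1Mb chunks until the allocator runs out of its reserved
// space; it is expected to die with an "Out of memory" check.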
template <class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

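// Basic coverage for the secondary (LargeMmapAllocator): plain alloc/dealloc,
// metadata, and progressively larger alignments.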
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();

  static const int kNumAllocs = 100;
  char *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = (char *)a.Allocate(size, 1);
  }
  // Deallocate all.
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  for (int i = 0; i < kNumAllocs; i++) {
    char *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that nothing is left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    char *x = (char *)a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    char *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      char *p = allocated[i] = (char *)a.Allocate(size, alignment);
      CHECK_EQ(p, a.GetBlockBegin(p));
      CHECK_EQ(p, a.GetBlockBegin(p + size - 1));
      CHECK_EQ(p, a.GetBlockBegin(p + size / 2));
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      p[0] = p[size - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

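// The combined allocator routes requests either to the primary size class
// allocator (through a per-thread cache) or to the secondary allocator for
// sizes the primary cannot serve. A minimal usage sketch, mirroring the calls
// exercised below:
//   CombinedAllocator<Primary, Cache, Secondary> allocator;
//   allocator.Init();
//   void *p = allocator.Allocate(&cache, size, alignment);
//   allocator.Deallocate(&cache, p);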
template
<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  cache.Init();

  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

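// Checks that a per-thread cache drains cleanly: after each
// allocate/deallocate round plus Drain(), TotalMemoryUsed() must not change
// between iterations.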
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr i = 0; i < kNumAllocs; i++) {
      allocated[i] = cache.Allocate(a, 0);
    }
    for (uptr i = 0; i < kNumAllocs; i++) {
      cache.Deallocate(a, 0, allocated[i]);
    }
    cache.Drain(a);
    uptr total_allocated = a->TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

#if SANITIZER_WORDSIZE == 64
typedef SizeClassAllocatorLocalCache<Allocator64> AllocatorCache;
static THREADLOCAL AllocatorCache static_allocator_cache;

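// Each worker allocates once through the thread-local cache and drains it;
// the test below verifies that repeatedly spawning such threads does not grow
// the allocator's memory usage.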
void *AllocatorLeakTestWorker(void *arg) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator *a = (Allocator*)(arg);
  static_allocator_cache.Allocate(a, 10);
  static_allocator_cache.Drain(a);
  return 0;
}

TEST(SanitizerCommon, AllocatorLeakTest) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator a;
  a.Init();
  uptr total_used_memory = 0;
  for (int i = 0; i < 100; i++) {
    pthread_t t;
    EXPECT_EQ(0, pthread_create(&t, 0, AllocatorLeakTestWorker, &a));
    EXPECT_EQ(0, pthread_join(t, 0));
    if (i == 0)
      total_used_memory = a.TotalMemoryUsed();
    EXPECT_EQ(a.TotalMemoryUsed(), total_used_memory);
  }

  a.TestOnlyUnmap();
}
#endif

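// Sanity checks for the internal allocator used by the sanitizer runtime
// itself (InternalAlloc/InternalFree).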
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

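// InternalScopedBuffer is an RAII array: size() reports its size in bytes,
// and the storage is released when the buffer goes out of scope.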
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}

#endif  // #if TSAN_DEBUG == 0