sanitizer_allocator_test.cc revision 68acb909db275a9d4fb16b37fab4c42b72671abb
//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <pthread.h>
#include <algorithm>
#include <vector>

// Too slow for debug build
#if TSAN_DEBUG == 0

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

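// Checks that ClassID() and Size() are consistent: each class's size maps
// back to that class, one byte past a class boundary lands in the next
// class, and every size in [1, kMaxSize] gets the smallest class that fits.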
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

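// Allocates chunks of many different sizes, verifies block boundaries,
// ownership, size classes and metadata round-trips, then frees everything
// and requires TotalMemoryUsed() to stay flat across iterations (no leaks).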
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 5; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)2, 1000000 / size);
      for (uptr i = 0; i < n_iter; i++) {
        void *x = a->Allocate(size, 1);
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin((char*)x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr i = 0; i < allocated.size(); i++) {
      void *x = allocated[i];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

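// Stresses the metadata lookup path by repeatedly calling GetMetaData() on
// a fixed set of live chunks.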
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a->Allocate(size, 1);
    allocated[i] = x;
  }
  // Fetch metadata kNumAllocs^2 times to stress GetMetaData().
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a->Deallocate(allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

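// Counts OnMap/OnUnmap callbacks so the tests below can observe exactly
// when each allocator maps and unmaps its state, user memory and metadata.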
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  // fprintf(stderr, "Map: %d Unmap: %d\n",
  //         TestMapUnmapCallback::map_count,
  //         TestMapUnmapCallback::unmap_count);
}

TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  void *x = a.Allocate(1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}

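// Exhausts the allocator's address range with 1MB allocations until
// Allocate() hits the internal out-of-memory CHECK; run under EXPECT_DEATH.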
template<class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

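// End-to-end test for LargeMmapAllocator: plain alloc/free, metadata
// round-trips with reverse-order frees, and aligned allocations; memory
// usage must drop back to zero after each phase.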
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();

  static const int kNumAllocs = 100;
  void *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = a.Allocate(size, 1);
  }
  // Deallocate all.
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  for (int i = 0; i < kNumAllocs; i++) {
    void *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    void *x = a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    void *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
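  // Now test aligned allocations: for each power-of-two alignment, allocate,
  // check the alignment, touch both ends of the block, then free.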
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      allocated[i] = a.Allocate(size, alignment);
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      char *p = (char*)allocated[i];
      p[0] = p[size - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

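// Drives CombinedAllocator (primary size-class allocator plus a secondary
// for large blocks) through a local cache: oversized requests must fail
// cleanly and metadata must survive a shuffled deallocation order.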
template
<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  cache.Init();

  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

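// Verifies that SizeClassAllocatorLocalCache gives everything back on
// Drain(): after each allocate/deallocate/drain cycle the backing
// allocator's memory usage must be unchanged.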
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr i = 0; i < kNumAllocs; i++) {
      allocated[i] = cache.Allocate(a, 0);
    }
    for (uptr i = 0; i < kNumAllocs; i++) {
      cache.Deallocate(a, 0, allocated[i]);
    }
    cache.Drain(a);
    uptr total_allocated = a->TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

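// Each worker thread allocates through a THREADLOCAL cache and drains it
// before exiting; across 100 short-lived threads the allocator's footprint
// must stay constant, i.e. dying threads leak nothing.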
#if SANITIZER_WORDSIZE == 64
typedef SizeClassAllocatorLocalCache<Allocator64> AllocatorCache;
static THREADLOCAL AllocatorCache static_allocator_cache;

void *AllocatorLeakTestWorker(void *arg) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator *a = (Allocator*)(arg);
  static_allocator_cache.Allocate(a, 10);
  static_allocator_cache.Drain(a);
  return 0;
}

TEST(SanitizerCommon, AllocatorLeakTest) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator a;
  a.Init();
  uptr total_used_memory = 0;
  for (int i = 0; i < 100; i++) {
    pthread_t t;
    EXPECT_EQ(0, pthread_create(&t, 0, AllocatorLeakTestWorker, &a));
    EXPECT_EQ(0, pthread_join(t, 0));
    if (i == 0)
      total_used_memory = a.TotalMemoryUsed();
    EXPECT_EQ(a.TotalMemoryUsed(), total_used_memory);
  }

  a.TestOnlyUnmap();
}
#endif

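// The remaining tests cover the sanitizers' internal allocator
// (InternalAlloc/InternalFree) and InternalScopedBuffer.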
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}

#endif  // TSAN_DEBUG == 0