// sanitizer_allocator_test.cc revision da1f82bcbe50b28d11e93eacdcd529e9f0e2ef38
1//===-- sanitizer_allocator_test.cc ---------------------------------------===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
11// Tests for sanitizer_allocator.h.
12//
13//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <pthread.h>
#include <algorithm>
#include <random>
#include <vector>
23
24// Too slow for debug build
25#if TSAN_DEBUG == 0
26
27#if SANITIZER_WORDSIZE == 64
28static const uptr kAllocatorSpace = 0x700000000000ULL;
29static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
30static const u64 kAddressSpaceSize = 1ULL << 47;
31
32typedef SizeClassAllocator64<
33  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;
34
35typedef SizeClassAllocator64<
36  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
37#else
38static const u64 kAddressSpaceSize = 1ULL << 32;
39#endif
40
41typedef SizeClassAllocator32<
42  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;
43
// Verifies the internal consistency of a SizeClassMap: class->size and
// size->class must be inverse mappings, sizes must strictly increase with
// the class index, and every size up to kMaxSize must map to a class that
// is large enough to hold it.
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  // Debug aid: dump the whole size-class table (disabled by default).
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    // Size(c) must map back to class c...
    CHECK_EQ(SCMap::ClassID(s), c);
    // ...one byte more spills into the next class (except for the last)...
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    // ...and one byte less still fits in class c.
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  // Requests above kMaxSize map to class 0.
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  // For every size: its class exists, is large enough, and the previous
  // class (if any) is strictly too small.
  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}
73
// Size-class-map invariants for the default map.
TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}
77
// Size-class-map invariants for the compact map.
TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}
81
82template <class Allocator>
83void TestSizeClassAllocator() {
84  Allocator *a = new Allocator;
85  a->Init();
86
87  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
88    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};
89
90  std::vector<void *> allocated;
91
92  uptr last_total_allocated = 0;
93  for (int i = 0; i < 5; i++) {
94    // Allocate a bunch of chunks.
95    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
96      uptr size = sizes[s];
97      if (!a->CanAllocate(size, 1)) continue;
98      // printf("s = %ld\n", size);
99      uptr n_iter = std::max((uptr)6, 1000000 / size);
100      for (uptr i = 0; i < n_iter; i++) {
101        char *x = (char*)a->Allocate(size, 1);
102        x[0] = 0;
103        x[size - 1] = 0;
104        x[size / 2] = 0;
105        allocated.push_back(x);
106        CHECK_EQ(x, a->GetBlockBegin(x));
107        CHECK_EQ(x, a->GetBlockBegin(x + size - 1));
108        CHECK(a->PointerIsMine(x));
109        CHECK(a->PointerIsMine(x + size - 1));
110        CHECK(a->PointerIsMine(x + size / 2));
111        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
112        uptr class_id = a->GetSizeClass(x);
113        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
114        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
115        metadata[0] = reinterpret_cast<uptr>(x) + 1;
116        metadata[1] = 0xABCD;
117      }
118    }
119    // Deallocate all.
120    for (uptr i = 0; i < allocated.size(); i++) {
121      void *x = allocated[i];
122      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
123      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
124      CHECK_EQ(metadata[1], 0xABCD);
125      a->Deallocate(x);
126    }
127    allocated.clear();
128    uptr total_allocated = a->TotalMemoryUsed();
129    if (last_total_allocated == 0)
130      last_total_allocated = total_allocated;
131    CHECK_EQ(last_total_allocated, total_allocated);
132  }
133
134  a->TestOnlyUnmap();
135  delete a;
136}
137
138#if SANITIZER_WORDSIZE == 64
// Full allocator exercise for the 64-bit allocator with the default map.
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}
142
// Full allocator exercise for the 64-bit allocator with the compact map.
TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
146#endif
147
// Full allocator exercise for the 32-bit allocator with the compact map.
TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}
151
152template <class Allocator>
153void SizeClassAllocatorMetadataStress() {
154  Allocator *a = new Allocator;
155  a->Init();
156  static volatile void *sink;
157
158  const uptr kNumAllocs = 10000;
159  void *allocated[kNumAllocs];
160  for (uptr i = 0; i < kNumAllocs; i++) {
161    uptr size = (i % 4096) + 1;
162    void *x = a->Allocate(size, 1);
163    allocated[i] = x;
164  }
165  // Get Metadata kNumAllocs^2 times.
166  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
167    sink = a->GetMetaData(allocated[i % kNumAllocs]);
168  }
169  for (uptr i = 0; i < kNumAllocs; i++) {
170    a->Deallocate(allocated[i]);
171  }
172
173  a->TestOnlyUnmap();
174  (void)sink;
175  delete a;
176}
177
178#if SANITIZER_WORDSIZE == 64
// Metadata stress for the 64-bit allocator with the default map.
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}
182
// Metadata stress for the 64-bit allocator with the compact map.
TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
186#endif
// Metadata stress for the 32-bit allocator with the compact map.
TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}
190
// Counts map/unmap events; passed to allocators as the MapUnmapCallback
// template parameter by the tests below so they can assert how many
// mmap/munmap operations each allocator performs.
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
// Zero-initialized static counters; tests reset them explicitly anyway.
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;
198
199#if SANITIZER_WORDSIZE == 64
// Checks the exact number of map/unmap events the 64-bit allocator makes:
// one map for its state in Init(), two more (chunk + metadata) for the
// first allocation, and a single unmap for the whole region at teardown.
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
215#endif
216
// Same counting check for the 32-bit allocator: one map for allocator
// state, one per allocation region, and matching unmaps at teardown.
TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  // fprintf(stderr, "Map: %d Unmap: %d\n",
  //         TestMapUnmapCallback::map_count,
  //         TestMapUnmapCallback::unmap_count);
}
235
// For the large allocator each allocation is its own mapping: exactly one
// map per Allocate and one unmap per Deallocate.
TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  void *x = a.Allocate(1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}
246
247template<class Allocator>
248void FailInAssertionOnOOM() {
249  Allocator a;
250  a.Init();
251  const uptr size = 1 << 20;
252  for (int i = 0; i < 1000000; i++) {
253    a.Allocate(size, 1);
254  }
255
256  a.TestOnlyUnmap();
257}
258
259#if SANITIZER_WORDSIZE == 64
// Exhausting the 64-bit allocator must abort with "Out of memory".
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
263#endif
264
// Exercises LargeMmapAllocator directly: plain allocations, metadata
// round-trips, LIFO/FIFO deallocation, and a sweep over power-of-two
// alignments; after each phase TotalMemoryUsed must drop back to zero.
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();

  static const int kNumAllocs = 100;
  char *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = (char *)a.Allocate(size, 1);
  }
  // Deallocate all.
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  for (int i = 0; i < kNumAllocs; i++) {
    char *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    char *x = (char *)a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    char *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
  // Sweep alignments up to 256M (64-bit) or 16M (32-bit).
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      char *p = allocated[i] = (char *)a.Allocate(size, alignment);
      CHECK_EQ(p, a.GetBlockBegin(p));
      CHECK_EQ(p, a.GetBlockBegin(p + size - 1));
      CHECK_EQ(p, a.GetBlockBegin(p + size / 2));
      // The result must honor the requested alignment.
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      // Touch both ends of the chunk.
      p[0] = p[size - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}
321
322template
323<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
324void TestCombinedAllocator() {
325  typedef
326      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
327      Allocator;
328  Allocator *a = new Allocator;
329  a->Init();
330
331  AllocatorCache cache;
332  cache.Init();
333
334  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
335  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
336  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
337  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
338  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);
339
340  const uptr kNumAllocs = 100000;
341  const uptr kNumIter = 10;
342  for (uptr iter = 0; iter < kNumIter; iter++) {
343    std::vector<void*> allocated;
344    for (uptr i = 0; i < kNumAllocs; i++) {
345      uptr size = (i % (1 << 14)) + 1;
346      if ((i % 1024) == 0)
347        size = 1 << (10 + (i % 14));
348      void *x = a->Allocate(&cache, size, 1);
349      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
350      CHECK_EQ(*meta, 0);
351      *meta = size;
352      allocated.push_back(x);
353    }
354
355    random_shuffle(allocated.begin(), allocated.end());
356
357    for (uptr i = 0; i < kNumAllocs; i++) {
358      void *x = allocated[i];
359      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
360      CHECK_NE(*meta, 0);
361      CHECK(a->PointerIsMine(x));
362      *meta = 0;
363      a->Deallocate(&cache, x);
364    }
365    allocated.clear();
366    a->SwallowCache(&cache);
367  }
368  a->TestOnlyUnmap();
369}
370
371#if SANITIZER_WORDSIZE == 64
// Combined-allocator exercise: 64-bit primary + large-mmap secondary.
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}
377
// Combined-allocator exercise: compact 64-bit primary + large-mmap secondary.
TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
383#endif
384
// Combined-allocator exercise: compact 32-bit primary + large-mmap secondary.
TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}
390
391template <class AllocatorCache>
392void TestSizeClassAllocatorLocalCache() {
393  static THREADLOCAL AllocatorCache static_allocator_cache;
394  static_allocator_cache.Init();
395  AllocatorCache cache;
396  typedef typename AllocatorCache::Allocator Allocator;
397  Allocator *a = new Allocator();
398
399  a->Init();
400  cache.Init();
401
402  const uptr kNumAllocs = 10000;
403  const int kNumIter = 100;
404  uptr saved_total = 0;
405  for (int i = 0; i < kNumIter; i++) {
406    void *allocated[kNumAllocs];
407    for (uptr i = 0; i < kNumAllocs; i++) {
408      allocated[i] = cache.Allocate(a, 0);
409    }
410    for (uptr i = 0; i < kNumAllocs; i++) {
411      cache.Deallocate(a, 0, allocated[i]);
412    }
413    cache.Drain(a);
414    uptr total_allocated = a->TotalMemoryUsed();
415    if (saved_total)
416      CHECK_EQ(saved_total, total_allocated);
417    saved_total = total_allocated;
418  }
419
420  a->TestOnlyUnmap();
421  delete a;
422}
423
424#if SANITIZER_WORDSIZE == 64
// Local-cache exercise for the 64-bit allocator with the default map.
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}
428}
429
// Local-cache exercise for the 64-bit allocator with the compact map.
TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
433}
434#endif
435
// Local-cache exercise for the 32-bit allocator with the compact map.
TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}
439}
440
441#if SANITIZER_WORDSIZE == 64
442typedef SizeClassAllocatorLocalCache<Allocator64> AllocatorCache;
443static THREADLOCAL AllocatorCache static_allocator_cache;
444
445void *AllocatorLeakTestWorker(void *arg) {
446  typedef AllocatorCache::Allocator Allocator;
447  Allocator *a = (Allocator*)(arg);
448  static_allocator_cache.Allocate(a, 10);
449  static_allocator_cache.Drain(a);
450  return 0;
451}
452
// Regression test: memory held by a thread-local cache must not leak when
// the thread exits. After the first worker thread has run, spawning more
// threads must not grow the allocator's footprint.
TEST(SanitizerCommon, AllocatorLeakTest) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator a;
  a.Init();
  uptr total_used_memory = 0;
  for (int i = 0; i < 100; i++) {
    pthread_t t;
    EXPECT_EQ(0, pthread_create(&t, 0, AllocatorLeakTestWorker, &a));
    EXPECT_EQ(0, pthread_join(t, 0));
    // Record the footprint after the first thread; it must stay constant.
    if (i == 0)
      total_used_memory = a.TotalMemoryUsed();
    EXPECT_EQ(a.TotalMemoryUsed(), total_used_memory);
  }

  a.TestOnlyUnmap();
}
469#endif
470
471TEST(Allocator, Basic) {
472  char *p = (char*)InternalAlloc(10);
473  EXPECT_NE(p, (char*)0);
474  char *p2 = (char*)InternalAlloc(20);
475  EXPECT_NE(p2, (char*)0);
476  EXPECT_NE(p2, p);
477  InternalFree(p);
478  InternalFree(p2);
479}
480
481TEST(Allocator, Stress) {
482  const int kCount = 1000;
483  char *ptrs[kCount];
484  unsigned rnd = 42;
485  for (int i = 0; i < kCount; i++) {
486    uptr sz = rand_r(&rnd) % 1000;
487    char *p = (char*)InternalAlloc(sz);
488    EXPECT_NE(p, (char*)0);
489    ptrs[i] = p;
490  }
491  for (int i = 0; i < kCount; i++) {
492    InternalFree(ptrs[i]);
493  }
494}
495
// InternalScopedBuffer reports its size in bytes and its storage is
// writable; the inner scope checks that destruction at end-of-scope works.
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}
509
510#endif  // #if TSAN_DEBUG==0
511