//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "sanitizer_test_utils.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <pthread.h>
#include <algorithm>
#include <vector>

// Too slow for debug build
#if TSAN_DEBUG == 0

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

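// A size class map must pass its own Validate(), which CHECKs the
// map's internal invariants.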
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
  // SCMap::Print();
  SCMap::Validate();
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

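// Generic torture test for a size class allocator: allocate chunks of
// widely varying sizes through a local cache, verify block bounds,
// ownership, size class and metadata round-trips, then free everything
// and check that total memory usage is stable across iterations.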
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();
  SizeClassAllocatorLocalCache<Allocator> cache;
  cache.Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 120000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 3; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)6, 10000000 / size);
      // fprintf(stderr, "size: %ld iter: %ld\n", size, n_iter);
      for (uptr j = 0; j < n_iter; j++) {
        uptr class_id0 = Allocator::SizeClassMapT::ClassID(size);
        char *x = (char*)cache.Allocate(a, class_id0);
        x[0] = 0;
        x[size - 1] = 0;
        x[size / 2] = 0;
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin(x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK(a->PointerIsMine(x + size - 1));
        CHECK(a->PointerIsMine(x + size / 2));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr j = 0; j < allocated.size(); j++) {
      void *x = allocated[j];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      cache.Deallocate(a, a->GetSizeClass(x), x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

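// Stress GetMetaData(): allocate chunks from 50 size classes, then
// query their metadata kNumAllocs^2 times.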
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  SizeClassAllocatorLocalCache<Allocator> cache;
  cache.Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    void *x = cache.Allocate(a, 1 + i % 50);
    allocated[i] = x;
  }
  // Get Metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    cache.Deallocate(a, 1 + i % 50, allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

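// Counts map/unmap events so the tests below can check exactly when
// each allocator maps and unmaps memory.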
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  SizeClassAllocatorLocalCache<Allocator64WithCallBack> cache;
  cache.Init();
  a->AllocateBatch(&cache, 64);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  SizeClassAllocatorLocalCache<Allocator32WithCallBack> cache;
  cache.Init();
  a->AllocateBatch(&cache, 64);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  // fprintf(stderr, "Map: %d Unmap: %d\n",
  //         TestMapUnmapCallback::map_count,
  //         TestMapUnmapCallback::unmap_count);
}

TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  void *x = a.Allocate(1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}

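// Keep allocating batches until the allocator's space is exhausted;
// running out of memory must trip a CHECK instead of failing silently.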
template<class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  SizeClassAllocatorLocalCache<Allocator> cache;
  cache.Init();
  for (int i = 0; i < 1000000; i++) {
    a.AllocateBatch(&cache, 64);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

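// Exercise the secondary (mmap-based) allocator directly: bulk
// allocations, metadata round-trips, reverse-order deallocation, and
// large power-of-two alignments.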
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();

  static const int kNumAllocs = 1000;
  char *allocated[kNumAllocs];
  static const uptr size = 4000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = (char *)a.Allocate(size, 1);
    CHECK(a.PointerIsMine(allocated[i]));
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all.
  for (int i = 0; i < kNumAllocs; i++) {
    char *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    char *x = (char *)a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  for (int i = 0; i < kNumAllocs * kNumAllocs; i++) {
    char *p = allocated[i % kNumAllocs];
    CHECK(a.PointerIsMine(p));
    CHECK(a.PointerIsMine(p + 2000));
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    char *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Test alignments.
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    const uptr kNumAlignedAllocs = 100;
    for (uptr i = 0; i < kNumAlignedAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      char *p = allocated[i] = (char *)a.Allocate(size, alignment);
      CHECK_EQ(p, a.GetBlockBegin(p));
      CHECK_EQ(p, a.GetBlockBegin(p + size - 1));
      CHECK_EQ(p, a.GetBlockBegin(p + size / 2));
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      p[0] = p[size - 1] = 0;
    }
    for (uptr i = 0; i < kNumAlignedAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

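// Exercise the combined (primary + secondary) allocator: impossible
// sizes must return 0, and a mixed small/large workload must
// round-trip metadata and free back completely.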
template
<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  cache.Init();

  // Impossible sizes must fail cleanly and return 0.
  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

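// After Drain(), repeated allocate/free rounds through a local cache
// must leave the allocator's total memory usage unchanged.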
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  // Check that a cache object can also live in static storage.
  static AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int class_id = 1; class_id <= 5; class_id++) {
    for (int it = 0; it < kNumIter; it++) {
      void *allocated[kNumAllocs];
      for (uptr i = 0; i < kNumAllocs; i++) {
        allocated[i] = cache.Allocate(a, class_id);
      }
      for (uptr i = 0; i < kNumAllocs; i++) {
        cache.Deallocate(a, class_id, allocated[i]);
      }
      cache.Drain(a);
      uptr total_allocated = a->TotalMemoryUsed();
      if (it)
        CHECK_EQ(saved_total, total_allocated);
      saved_total = total_allocated;
    }
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

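// A cache used from short-lived threads must not leak: total memory
// usage should stay flat across 100 create/join cycles.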
#if SANITIZER_WORDSIZE == 64
typedef SizeClassAllocatorLocalCache<Allocator64> AllocatorCache;
static AllocatorCache static_allocator_cache;

void *AllocatorLeakTestWorker(void *arg) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator *a = (Allocator*)(arg);
  static_allocator_cache.Allocate(a, 10);
  static_allocator_cache.Drain(a);
  return 0;
}

TEST(SanitizerCommon, AllocatorLeakTest) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator a;
  a.Init();
  uptr total_used_memory = 0;
  for (int i = 0; i < 100; i++) {
    pthread_t t;
    EXPECT_EQ(0, pthread_create(&t, 0, AllocatorLeakTestWorker, &a));
    EXPECT_EQ(0, pthread_join(t, 0));
    if (i == 0)
      total_used_memory = a.TotalMemoryUsed();
    EXPECT_EQ(a.TotalMemoryUsed(), total_used_memory);
  }

  a.TestOnlyUnmap();
}
#endif

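// Tests for the internal allocator (InternalAlloc/InternalFree).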
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = my_rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

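// InternalScopedBuffer reports its size in bytes and provides
// writable storage for the duration of its scope.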
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}

#endif  // #if TSAN_DEBUG==0