//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <pthread.h>
#include <algorithm>
#include <vector>

// Too slow for debug build
#if TSAN_DEBUG == 0

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

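// Instantiate a size class map and run its built-in consistency checks.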
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
  // SCMap::Print();
  SCMap::Validate();
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

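// Generic test for a size class allocator: allocate chunks across a wide
// range of sizes, verify ownership, block boundaries, size class ids and
// metadata round-trips, then free everything and check that total memory
// usage stays constant across repeated allocate/free cycles.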
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 120000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 3; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)6, 10000000 / size);
      // fprintf(stderr, "size: %ld iter: %ld\n", size, n_iter);
      for (uptr i = 0; i < n_iter; i++) {
        char *x = (char*)a->Allocate(size, 1);
        x[0] = 0;
        x[size - 1] = 0;
        x[size / 2] = 0;
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin(x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK(a->PointerIsMine(x + size - 1));
        CHECK(a->PointerIsMine(x + size / 2));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr i = 0; i < allocated.size(); i++) {
      void *x = allocated[i];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

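// Stress metadata lookups: allocate kNumAllocs chunks of varying sizes,
// then call GetMetaData() kNumAllocs^2 times, cycling over the chunks.
// The volatile sink keeps the compiler from optimizing the loop away.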
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a->Allocate(size, 1);
    allocated[i] = x;
  }
  // Get Metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a->Deallocate(allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

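// Counts map/unmap events so tests can verify exactly when the allocators
// map new memory (state, chunks, metadata) and when they release it.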
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  // fprintf(stderr, "Map: %d Unmap: %d\n",
  //         TestMapUnmapCallback::map_count,
  //         TestMapUnmapCallback::unmap_count);
}

TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  void *x = a.Allocate(1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}

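// Allocate 32K chunks in an unbounded loop; the allocator is expected to
// run out of space and die with an "Out of memory" check failure (see the
// EXPECT_DEATH below).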
template<class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 15;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();

  static const int kNumAllocs = 1000;
  char *allocated[kNumAllocs];
  static const uptr size = 4000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = (char *)a.Allocate(size, 1);
    CHECK(a.PointerIsMine(allocated[i]));
  }
  // Deallocate all.
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  for (int i = 0; i < kNumAllocs; i++) {
    char *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    char *x = (char *)a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  for (int i = 0; i < kNumAllocs * kNumAllocs; i++) {
    char *p = allocated[i % kNumAllocs];
    CHECK(a.PointerIsMine(p));
    CHECK(a.PointerIsMine(p + 2000));
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    char *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Test alignments.
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    const uptr kNumAlignedAllocs = 100;
    for (uptr i = 0; i < kNumAlignedAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      char *p = allocated[i] = (char *)a.Allocate(size, alignment);
      CHECK_EQ(p, a.GetBlockBegin(p));
      CHECK_EQ(p, a.GetBlockBegin(p + size - 1));
      CHECK_EQ(p, a.GetBlockBegin(p + size / 2));
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      p[0] = p[size - 1] = 0;
    }
    for (uptr i = 0; i < kNumAlignedAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

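// Exercise the combined allocator: the primary size class allocator serves
// small requests and the secondary (LargeMmapAllocator) serves the rest,
// with a per-thread cache in front. Impossibly large requests (around
// (uptr)-1) must fail gracefully by returning null rather than crashing.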
template
<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  cache.Init();

  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

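// Allocate and free through a local cache, then Drain() it back into the
// allocator; after the first iteration the allocator's total memory usage
// must stay constant, i.e. the cache returns everything it took.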
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int class_id = 1; class_id <= 5; class_id++) {
    for (int it = 0; it < kNumIter; it++) {
      void *allocated[kNumAllocs];
      for (uptr i = 0; i < kNumAllocs; i++) {
        allocated[i] = cache.Allocate(a, class_id);
      }
      for (uptr i = 0; i < kNumAllocs; i++) {
        cache.Deallocate(a, class_id, allocated[i]);
      }
      cache.Drain(a);
      uptr total_allocated = a->TotalMemoryUsed();
      if (it)
        CHECK_EQ(saved_total, total_allocated);
      saved_total = total_allocated;
    }
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

#if SANITIZER_WORDSIZE == 64
typedef SizeClassAllocatorLocalCache<Allocator64> AllocatorCache;
static THREADLOCAL AllocatorCache static_allocator_cache;

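// Worker for AllocatorLeakTest: allocates once through the thread-local
// cache and drains it, so a well-behaved allocator gains no memory per
// thread.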
void *AllocatorLeakTestWorker(void *arg) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator *a = (Allocator*)(arg);
  static_allocator_cache.Allocate(a, 10);
  static_allocator_cache.Drain(a);
  return 0;
}

TEST(SanitizerCommon, AllocatorLeakTest) {
  typedef AllocatorCache::Allocator Allocator;
  Allocator a;
  a.Init();
  uptr total_used_memory = 0;
  for (int i = 0; i < 100; i++) {
    pthread_t t;
    EXPECT_EQ(0, pthread_create(&t, 0, AllocatorLeakTestWorker, &a));
    EXPECT_EQ(0, pthread_join(t, 0));
    if (i == 0)
      total_used_memory = a.TotalMemoryUsed();
    EXPECT_EQ(a.TotalMemoryUsed(), total_used_memory);
  }

  a.TestOnlyUnmap();
}
#endif

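// Sanity tests for the internal allocator that backs the sanitizer
// runtime's own data structures (InternalAlloc/InternalFree).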
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

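// InternalScopedBuffer is an RAII-style buffer: it is sized in elements,
// reports its size in bytes, and releases its memory when it goes out of
// scope (the int buffer below dies at the end of its block).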
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}

#endif  // #if TSAN_DEBUG==0