sanitizer_allocator_test.cc revision 68902f431172d101f3c6322d96a99d705a37ad95
//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <algorithm>
#include <vector>
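
// The tests below exercise the allocator layers defined in
// sanitizer_allocator.h: the size-class primaries (SizeClassAllocator64,
// SizeClassAllocator32), the LargeMmapAllocator secondary, the
// CombinedAllocator facade with its SizeClassAllocatorLocalCache, and the
// InternalAlloc()/InternalFree() interface.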

// Too slow for debug builds.
#if TSAN_DEBUG==0

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
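  // Round-trip invariants: ClassID(Size(c)) == c, class sizes strictly
  // increase, and ClassID(s) picks the smallest class that fits s.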
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 5; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)2, 1000000 / size);
      for (uptr i = 0; i < n_iter; i++) {
        void *x = a->Allocate(size, 1);
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin((char*)x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr i = 0; i < allocated.size(); i++) {
      void *x = allocated[i];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

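// Stress GetMetaData(): the metadata pointer of a live chunk is fetched
// kNumAllocs^2 times; the lookup must be stable and side-effect free.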
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a->Allocate(size, 1);
    allocated[i] = x;
  }
  // Get metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a->Deallocate(allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

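// Counts OnMap/OnUnmap callbacks so the tests below can check exactly when
// each allocator maps and unmaps memory.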
struct TestMapUnmapCallback {
  static int map_count, unmap_count;
  void OnMap(uptr p, uptr size) const { map_count++; }
  void OnUnmap(uptr p, uptr size) const { unmap_count++; }
};
int TestMapUnmapCallback::map_count;
int TestMapUnmapCallback::unmap_count;

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator64<
      kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap,
      TestMapUnmapCallback> Allocator64WithCallBack;
  Allocator64WithCallBack *a = new Allocator64WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 3);  // State + alloc + metadata.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);  // The whole thing.
  delete a;
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32MapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  typedef SizeClassAllocator32<
      0, kAddressSpaceSize, 16, CompactSizeClassMap,
      TestMapUnmapCallback> Allocator32WithCallBack;
  Allocator32WithCallBack *a = new Allocator32WithCallBack;
  a->Init();
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);  // Allocator state.
  a->Allocate(100, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 2);  // alloc.
  a->TestOnlyUnmap();
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 2);  // The whole thing + alloc.
  delete a;
  // fprintf(stderr, "Map: %d Unmap: %d\n",
  //         TestMapUnmapCallback::map_count,
  //         TestMapUnmapCallback::unmap_count);
}

TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
  TestMapUnmapCallback::map_count = 0;
  TestMapUnmapCallback::unmap_count = 0;
  LargeMmapAllocator<TestMapUnmapCallback> a;
  a.Init();
  void *x = a.Allocate(1 << 20, 1);
  EXPECT_EQ(TestMapUnmapCallback::map_count, 1);
  a.Deallocate(x);
  EXPECT_EQ(TestMapUnmapCallback::unmap_count, 1);
}

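// Allocate 1MB chunks without ever freeing them; once the allocator's
// address range is exhausted it is expected to die with "Out of memory".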
template<class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator<> a;
  a.Init();

  static const int kNumAllocs = 100;
  void *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = a.Allocate(size, 1);
  }
  // Deallocate all.
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  for (int i = 0; i < kNumAllocs; i++) {
    void *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    void *x = a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    void *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
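
  // Aligned allocations: try every power-of-two alignment up to
  // max_alignment (2^28 on 64-bit, 2^24 on 32-bit) and touch both ends
  // of each returned block.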
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      allocated[i] = a.Allocate(size, alignment);
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      char *p = (char*)allocated[i];
      p[0] = p[size - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

template
<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  cache.Init();

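  // Huge or overflowing size requests must fail cleanly (return 0) rather
  // than crash or wrap around.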
  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator<>,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
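  // Allocate and free the same pattern each iteration; after Drain() the
  // allocator's total memory use must not grow beyond the first iteration.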
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr i = 0; i < kNumAllocs; i++) {
      allocated[i] = cache.Allocate(a, 0);
    }
    for (uptr i = 0; i < kNumAllocs; i++) {
      cache.Deallocate(a, 0, allocated[i]);
    }
    cache.Drain(a);
    uptr total_allocated = a->TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

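// InternalAlloc()/InternalFree() back the sanitizer runtime's own internal
// allocations.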
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

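// InternalScopedBuffer<T> is an RAII buffer of kSize elements; size()
// reports the size in bytes, and the storage is released at scope exit.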
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  internal_memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}

#endif  // #if TSAN_DEBUG==0