// sanitizer_allocator_test.cc revision 784935d1bbc301eaf92fd9f7d3a551eb65edcd15
//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <algorithm>
#include <vector>

232a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)#if SANITIZER_WORDSIZE == 64
242a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)static const uptr kAllocatorSpace = 0x700000000000ULL;
252a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
262a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)static const u64 kAddressSpaceSize = 1ULL << 47;
272a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)
285d1f7b1de12d16ceb2c938c56701a3e8bfa558f7Torne (Richard Coles)typedef SizeClassAllocator64<
295d1f7b1de12d16ceb2c938c56701a3e8bfa558f7Torne (Richard Coles)  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;
302a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)
312a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)typedef SizeClassAllocator64<
322a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
332a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)#else
342a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)static const u64 kAddressSpaceSize = 1ULL << 32;
352a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)#endif
36
37typedef SizeClassAllocator32<
38  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;
39
40template <class SizeClassMap>
41void TestSizeClassMap() {
42  typedef SizeClassMap SCMap;
43#if 0
44  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
45    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
46        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
47        SCMap::MaxCached(i));
48  }
49#endif
50  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
51    uptr s = SCMap::Size(c);
52    CHECK_EQ(SCMap::ClassID(s), c);
53    if (c != SCMap::kNumClasses - 1)
54      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
55    CHECK_EQ(SCMap::ClassID(s - 1), c);
56    if (c)
57      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
58  }
59  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);
60
61  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
62    uptr c = SCMap::ClassID(s);
63    CHECK_LT(c, SCMap::kNumClasses);
64    CHECK_GE(SCMap::Size(c), s);
65    if (c > 0)
66      CHECK_LT(SCMap::Size(c-1), s);
67  }
68}
69
70TEST(SanitizerCommon, DefaultSizeClassMap) {
71  TestSizeClassMap<DefaultSizeClassMap>();
72}
73
74TEST(SanitizerCommon, CompactSizeClassMap) {
75  TestSizeClassMap<CompactSizeClassMap>();
76}
77
78template <class Allocator>
79void TestSizeClassAllocator() {
80  Allocator *a = new Allocator;
81  a->Init();
82
83  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
84    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};
85
86  std::vector<void *> allocated;
87
88  uptr last_total_allocated = 0;
89  for (int i = 0; i < 5; i++) {
90    // Allocate a bunch of chunks.
91    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
92      uptr size = sizes[s];
93      if (!a->CanAllocate(size, 1)) continue;
94      // printf("s = %ld\n", size);
95      uptr n_iter = std::max((uptr)2, 1000000 / size);
96      for (uptr i = 0; i < n_iter; i++) {
97        void *x = a->Allocate(size, 1);
98        allocated.push_back(x);
99        CHECK(a->PointerIsMine(x));
100        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
101        uptr class_id = a->GetSizeClass(x);
102        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
103        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
104        metadata[0] = reinterpret_cast<uptr>(x) + 1;
105        metadata[1] = 0xABCD;
106      }
107    }
108    // Deallocate all.
109    for (uptr i = 0; i < allocated.size(); i++) {
110      void *x = allocated[i];
111      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
112      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
113      CHECK_EQ(metadata[1], 0xABCD);
114      a->Deallocate(x);
115    }
116    allocated.clear();
117    uptr total_allocated = a->TotalMemoryUsed();
118    if (last_total_allocated == 0)
119      last_total_allocated = total_allocated;
120    CHECK_EQ(last_total_allocated, total_allocated);
121  }
122
123  a->TestOnlyUnmap();
124  delete a;
125}
126
#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

137TEST(SanitizerCommon, SizeClassAllocator32Compact) {
138  TestSizeClassAllocator<Allocator32Compact>();
139}
140
141template <class Allocator>
142void SizeClassAllocatorMetadataStress() {
143  Allocator *a = new Allocator;
144  a->Init();
145  static volatile void *sink;
146
147  const uptr kNumAllocs = 10000;
148  void *allocated[kNumAllocs];
149  for (uptr i = 0; i < kNumAllocs; i++) {
150    uptr size = (i % 4096) + 1;
151    void *x = a->Allocate(size, 1);
152    allocated[i] = x;
153  }
154  // Get Metadata kNumAllocs^2 times.
155  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
156    sink = a->GetMetaData(allocated[i % kNumAllocs]);
157  }
158  for (uptr i = 0; i < kNumAllocs; i++) {
159    a->Deallocate(allocated[i]);
160  }
161
162  a->TestOnlyUnmap();
163  (void)sink;
164  delete a;
165}
166
167#if SANITIZER_WORDSIZE == 64
168TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
169  SizeClassAllocatorMetadataStress<Allocator64>();
170}
171
172TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
173  SizeClassAllocatorMetadataStress<Allocator64Compact>();
174}
175#endif
176TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
177  SizeClassAllocatorMetadataStress<Allocator32Compact>();
178}
179
180template<class Allocator>
181void FailInAssertionOnOOM() {
182  Allocator a;
183  a.Init();
184  const uptr size = 1 << 20;
185  for (int i = 0; i < 1000000; i++) {
186    a.Allocate(size, 1);
187  }
188
189  a.TestOnlyUnmap();
190}
191
#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

198TEST(SanitizerCommon, LargeMmapAllocator) {
199  LargeMmapAllocator a;
200  a.Init();
201
202  static const int kNumAllocs = 100;
203  void *allocated[kNumAllocs];
204  static const uptr size = 1000;
205  // Allocate some.
206  for (int i = 0; i < kNumAllocs; i++) {
207    allocated[i] = a.Allocate(size, 1);
208  }
209  // Deallocate all.
210  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
211  for (int i = 0; i < kNumAllocs; i++) {
212    void *p = allocated[i];
213    CHECK(a.PointerIsMine(p));
214    a.Deallocate(p);
215  }
216  // Check that non left.
217  CHECK_EQ(a.TotalMemoryUsed(), 0);
218
219  // Allocate some more, also add metadata.
220  for (int i = 0; i < kNumAllocs; i++) {
221    void *x = a.Allocate(size, 1);
222    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
223    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
224    *meta = i;
225    allocated[i] = x;
226  }
227  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
228  // Deallocate all in reverse order.
229  for (int i = 0; i < kNumAllocs; i++) {
230    int idx = kNumAllocs - i - 1;
231    void *p = allocated[idx];
232    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
233    CHECK_EQ(*meta, idx);
234    CHECK(a.PointerIsMine(p));
235    a.Deallocate(p);
236  }
237  CHECK_EQ(a.TotalMemoryUsed(), 0);
238  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
239  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
240    for (int i = 0; i < kNumAllocs; i++) {
241      uptr size = ((i % 10) + 1) * 4096;
242      allocated[i] = a.Allocate(size, alignment);
243      CHECK_EQ(0, (uptr)allocated[i] % alignment);
244      char *p = (char*)allocated[i];
245      p[0] = p[size - 1] = 0;
246    }
247    for (int i = 0; i < kNumAllocs; i++) {
248      a.Deallocate(allocated[i]);
249    }
250  }
251}
252
253template
254<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
255void TestCombinedAllocator() {
256  CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator> a;
257  a.Init();
258
259  AllocatorCache cache;
260  cache.Init();
261
262  EXPECT_EQ(a.Allocate(&cache, -1, 1), (void*)0);
263  EXPECT_EQ(a.Allocate(&cache, -1, 1024), (void*)0);
264  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
265  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
266  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);
267
268  const uptr kNumAllocs = 100000;
269  const uptr kNumIter = 10;
270  for (uptr iter = 0; iter < kNumIter; iter++) {
271    std::vector<void*> allocated;
272    for (uptr i = 0; i < kNumAllocs; i++) {
273      uptr size = (i % (1 << 14)) + 1;
274      if ((i % 1024) == 0)
275        size = 1 << (10 + (i % 14));
276      void *x = a.Allocate(&cache, size, 1);
277      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
278      CHECK_EQ(*meta, 0);
279      *meta = size;
280      allocated.push_back(x);
281    }
282
283    random_shuffle(allocated.begin(), allocated.end());
284
285    for (uptr i = 0; i < kNumAllocs; i++) {
286      void *x = allocated[i];
287      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
288      CHECK_NE(*meta, 0);
289      CHECK(a.PointerIsMine(x));
290      *meta = 0;
291      a.Deallocate(&cache, x);
292    }
293    allocated.clear();
294    a.SwallowCache(&cache);
295  }
296  a.TestOnlyUnmap();
297}
298
#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}
#endif

307template <class AllocatorCache>
308void TestSizeClassAllocatorLocalCache() {
309  static THREADLOCAL AllocatorCache static_allocator_cache;
310  static_allocator_cache.Init();
311  AllocatorCache cache;
312  typename AllocatorCache::Allocator a;
313
314  a.Init();
315  cache.Init();
316
317  const uptr kNumAllocs = 10000;
318  const int kNumIter = 100;
319  uptr saved_total = 0;
320  for (int i = 0; i < kNumIter; i++) {
321    void *allocated[kNumAllocs];
322    for (uptr i = 0; i < kNumAllocs; i++) {
323      allocated[i] = cache.Allocate(&a, 0);
324    }
325    for (uptr i = 0; i < kNumAllocs; i++) {
326      cache.Deallocate(&a, 0, allocated[i]);
327    }
328    cache.Drain(&a);
329    uptr total_allocated = a.TotalMemoryUsed();
330    if (saved_total)
331      CHECK_EQ(saved_total, total_allocated);
332    saved_total = total_allocated;
333  }
334
335  a.TestOnlyUnmap();
336}
337
#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}
#endif

345TEST(Allocator, Basic) {
346  char *p = (char*)InternalAlloc(10);
347  EXPECT_NE(p, (char*)0);
348  char *p2 = (char*)InternalAlloc(20);
349  EXPECT_NE(p2, (char*)0);
350  EXPECT_NE(p2, p);
351  InternalFree(p);
352  InternalFree(p2);
353}
354
355TEST(Allocator, Stress) {
356  const int kCount = 1000;
357  char *ptrs[kCount];
358  unsigned rnd = 42;
359  for (int i = 0; i < kCount; i++) {
360    uptr sz = rand_r(&rnd) % 1000;
361    char *p = (char*)InternalAlloc(sz);
362    EXPECT_NE(p, (char*)0);
363    ptrs[i] = p;
364  }
365  for (int i = 0; i < kCount; i++) {
366    InternalFree(ptrs[i]);
367  }
368}
369
370TEST(Allocator, ScopedBuffer) {
371  const int kSize = 512;
372  {
373    InternalScopedBuffer<int> int_buf(kSize);
374    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
375  }
376  InternalScopedBuffer<char> char_buf(kSize);
377  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
378  memset(char_buf.data(), 'c', kSize);
379  for (int i = 0; i < kSize; i++) {
380    EXPECT_EQ('c', char_buf[i]);
381  }
382}
383