sanitizer_allocator_test.cc revision e280ce59d37a67bee14da56a22e205d6562530b0
//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <string.h>  // for memset() in the ScopedBuffer test.
#include <algorithm>
#include <vector>

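// On 64-bit targets the primary allocator gets a fixed 1 TiB region at
// 0x700000000000; 32-bit targets fall back to SizeClassAllocator32 covering
// the whole 4 GiB address space.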
#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1T.
static const u64 kAddressSpaceSize = 1ULL << 47;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

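// Checks that a SizeClassMap is self-consistent: Size() and ClassID() must be
// inverses, class sizes must be strictly increasing, and every size up to
// kMaxSize must map to a class large enough to hold it.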
template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

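// Exercises a size-class allocator: allocates chunks of assorted sizes,
// verifies block boundaries, size classes and metadata, then checks that
// repeated allocate/deallocate cycles do not grow TotalMemoryUsed().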
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 5; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)2, 1000000 / size);
      for (uptr i = 0; i < n_iter; i++) {
        void *x = a->Allocate(size, 1);
        allocated.push_back(x);
        CHECK_EQ(x, a->GetBlockBegin(x));
        CHECK_EQ(x, a->GetBlockBegin((char*)x + size - 1));
        CHECK(a->PointerIsMine(x));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr i = 0; i < allocated.size(); i++) {
      void *x = allocated[i];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

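// Hammers GetMetaData() on live chunks to stress the metadata lookup path.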
template <class Allocator>
void SizeClassAllocatorMetadataStress() {
  Allocator *a = new Allocator;
  a->Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a->Allocate(size, 1);
    allocated[i] = x;
  }
  // Get Metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a->GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a->Deallocate(allocated[i]);
  }

  a->TestOnlyUnmap();
  (void)sink;
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
  SizeClassAllocatorMetadataStress<Allocator32Compact>();
}

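// Allocates 1 MiB chunks without ever freeing them; the allocator is expected
// to hit its space limit and die with an "Out of memory" check.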
template <class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

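// Basic checks for the secondary (mmap-based) allocator: accounting,
// metadata, reverse-order deallocation and aligned allocations.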
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator a;
  a.Init();

  static const int kNumAllocs = 100;
  void *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = a.Allocate(size, 1);
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all.
  for (int i = 0; i < kNumAllocs; i++) {
    void *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    void *x = a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    void *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
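  // Allocate with successively larger power-of-two alignments and make sure
  // the results are properly aligned and writable.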
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      allocated[i] = a.Allocate(size, alignment);
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      char *p = (char*)allocated[i];
      p[0] = p[size - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

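// Exercises CombinedAllocator (primary + secondary behind a per-thread
// cache): impossible requests must fail cleanly, and a shuffled
// allocate/deallocate workload must keep the metadata intact.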
template
<class PrimaryAllocator, class SecondaryAllocator, class AllocatorCache>
void TestCombinedAllocator() {
  typedef
      CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
      Allocator;
  Allocator *a = new Allocator;
  a->Init();

  AllocatorCache cache;
  cache.Init();

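  // Impossibly large requests, including ones where size + alignment
  // overflows, must fail by returning 0.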
  EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a->Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a->PointerIsMine(x));
      *meta = 0;
      a->Deallocate(&cache, x);
    }
    allocated.clear();
    a->SwallowCache(&cache);
  }
  a->TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator64) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator,
      SizeClassAllocatorLocalCache<Allocator64> > ();
}

TEST(SanitizerCommon, CombinedAllocator64Compact) {
  TestCombinedAllocator<Allocator64Compact,
      LargeMmapAllocator,
      SizeClassAllocatorLocalCache<Allocator64Compact> > ();
}
#endif

TEST(SanitizerCommon, CombinedAllocator32Compact) {
  TestCombinedAllocator<Allocator32Compact,
      LargeMmapAllocator,
      SizeClassAllocatorLocalCache<Allocator32Compact> > ();
}

template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
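  // A THREADLOCAL cache is declared and initialized here, presumably just to
  // verify that such an instantiation compiles; the test itself uses the
  // local |cache| below.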
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typedef typename AllocatorCache::Allocator Allocator;
  Allocator *a = new Allocator();

  a->Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr i = 0; i < kNumAllocs; i++) {
      allocated[i] = cache.Allocate(a, 0);
    }
    for (uptr i = 0; i < kNumAllocs; i++) {
      cache.Deallocate(a, 0, allocated[i]);
    }
    cache.Drain(a);
    uptr total_allocated = a->TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64Compact> >();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32CompactLocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator32Compact> >();
}

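// Sanity checks for InternalAlloc/InternalFree, the allocator the sanitizer
// runtime uses for its own data structures.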
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

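// Randomized stress test for InternalAlloc/InternalFree; the fixed seed keeps
// failures reproducible.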
TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

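// InternalScopedBuffer must report its size in bytes (element count times
// element size) and its contents must be writable.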
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}
412