sanitizer_allocator_test.cc revision 45595ba6b77b54b8212389cba6b95dc634122145
//===-- sanitizer_allocator_test.cc ---------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Tests for sanitizer_allocator.h.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_common.h"

#include "gtest/gtest.h"

#include <stdlib.h>
#include <string.h>
#include <algorithm>
#include <vector>

#if SANITIZER_WORDSIZE == 64
static const uptr kAllocatorSpace = 0x700000000000ULL;
static const uptr kAllocatorSize  = 0x010000000000ULL;  // 1 TiB.
static const u64 kAddressSpaceSize = 1ULL << 47;

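// Two 64-bit primary allocators over the same 1 TiB region: one built on the
// default size-class map, one on the compact map. The template arguments are
// the region base, the region size, the per-chunk metadata size (16 bytes
// here, matching the two uptrs the tests store), and the size-class map.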
typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;

typedef SizeClassAllocator64<
  kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
#else
static const u64 kAddressSpaceSize = 1ULL << 32;
#endif

typedef SizeClassAllocator32<
  0, kAddressSpaceSize, 16, CompactSizeClassMap> Allocator32Compact;

template <class SizeClassMap>
void TestSizeClassMap() {
  typedef SizeClassMap SCMap;
#if 0
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld (%lx) cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif
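  // Invariants checked below: ClassID(Size(c)) == c for every class, class
  // sizes strictly increase, ClassID(s) for any s <= kMaxSize picks the
  // smallest class with Size(c) >= s, and anything larger than kMaxSize maps
  // to class 0.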
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

TEST(SanitizerCommon, DefaultSizeClassMap) {
  TestSizeClassMap<DefaultSizeClassMap>();
}

TEST(SanitizerCommon, CompactSizeClassMap) {
  TestSizeClassMap<CompactSizeClassMap>();
}

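// Generic round-trip test for a size-class allocator: every chunk must be
// recognized by PointerIsMine(), report a sufficient actually-allocated size,
// land in the size class predicted by ClassID(), and keep its metadata intact
// until deallocation. Total memory use must stabilize across iterations,
// i.e. repeated alloc/free cycles may not leak.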
template <class Allocator>
void TestSizeClassAllocator() {
  Allocator *a = new Allocator;
  a->Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 5; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
      uptr size = sizes[s];
      if (!a->CanAllocate(size, 1)) continue;
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)2, 1000000 / size);
      for (uptr j = 0; j < n_iter; j++) {
        void *x = a->Allocate(size, 1);
        allocated.push_back(x);
        CHECK(a->PointerIsMine(x));
        CHECK_GE(a->GetActuallyAllocatedSize(x), size);
        uptr class_id = a->GetSizeClass(x);
        CHECK_EQ(class_id, Allocator::SizeClassMapT::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all.
    for (uptr j = 0; j < allocated.size(); j++) {
      void *x = allocated[j];
      uptr *metadata = reinterpret_cast<uptr*>(a->GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a->Deallocate(x);
    }
    allocated.clear();
    uptr total_allocated = a->TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a->TestOnlyUnmap();
  delete a;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64) {
  TestSizeClassAllocator<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64Compact) {
  TestSizeClassAllocator<Allocator64Compact>();
}
#endif

TEST(SanitizerCommon, SizeClassAllocator32Compact) {
  TestSizeClassAllocator<Allocator32Compact>();
}

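// Stress test for metadata access: allocate kNumAllocs chunks, call
// GetMetaData() kNumAllocs^2 times over them (through a volatile sink so the
// calls are not optimized away), then deallocate everything.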
template <class Allocator>
void SizeClassAllocator64MetadataStress() {
  Allocator a;
  a.Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a.Allocate(size, 1);
    allocated[i] = x;
  }
  // Get metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a.GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a.Deallocate(allocated[i]);
  }

  a.TestOnlyUnmap();
  (void)sink;
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  SizeClassAllocator64MetadataStress<Allocator64>();
}

TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
  SizeClassAllocator64MetadataStress<Allocator64Compact>();
}
#endif

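// Allocates 1 MiB chunks until the allocator's fixed address range is
// exhausted. This is expected to CHECK-fail with "Out of memory", which the
// EXPECT_DEATH test below relies on.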
template<class Allocator>
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM<Allocator64>(), "Out of memory");
}
#endif

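// The secondary allocator: LargeMmapAllocator backs each chunk with its own
// mapping. The test covers accounting via TotalMemoryUsed(), metadata,
// deallocation in both forward and reverse order, and alignments up to 2^28
// (2^24 on 32-bit).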
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator a;
  a.Init();

  static const int kNumAllocs = 100;
  void *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = a.Allocate(size, 1);
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all.
  for (int i = 0; i < kNumAllocs; i++) {
    void *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    void *x = a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    void *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
  uptr max_alignment = SANITIZER_WORDSIZE == 64 ? (1 << 28) : (1 << 24);
  for (uptr alignment = 8; alignment <= max_alignment; alignment *= 2) {
    for (int i = 0; i < kNumAllocs; i++) {
      uptr size = ((i % 10) + 1) * 4096;
      allocated[i] = a.Allocate(size, alignment);
      CHECK_EQ(0, (uptr)allocated[i] % alignment);
      char *p = (char*)allocated[i];
      p[0] = p[size - 1] = 0;
    }
    for (int i = 0; i < kNumAllocs; i++) {
      a.Deallocate(allocated[i]);
    }
  }
}

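// The combined allocator dispatches between the primary (size-class) and
// secondary (large mmap) allocators, with a per-thread cache in front of the
// primary one.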
template <class PrimaryAllocator, class SecondaryAllocator,
          class AllocatorCache>
void TestCombinedAllocator() {
  CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator> a;
  a.Init();

  AllocatorCache cache;
  cache.Init();

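  // Impossible requests: sizes near (uptr)-1 would overflow when rounded up
  // to the alignment, so Allocate() must return null instead of wrapping.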
  EXPECT_EQ(a.Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a.Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a.PointerIsMine(x));
      *meta = 0;
      a.Deallocate(&cache, x);
    }
    allocated.clear();
    a.SwallowCache(&cache);
  }
  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, CombinedAllocator) {
  TestCombinedAllocator<Allocator64,
      LargeMmapAllocator,
      SizeClassAllocatorLocalCache<Allocator64> >();
}
#endif

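// The local cache batches chunks per size class in front of the primary
// allocator. After each Drain(), the allocator's total memory use must be
// identical across iterations, i.e. the cache may not retain or leak chunks.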
template <class AllocatorCache>
void TestSizeClassAllocatorLocalCache() {
  // A THREADLOCAL cache is declared here only to check that it compiles and
  // initializes; the test itself uses the local `cache` below.
  static THREADLOCAL AllocatorCache static_allocator_cache;
  static_allocator_cache.Init();
  AllocatorCache cache;
  typename AllocatorCache::Allocator a;

  a.Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr j = 0; j < kNumAllocs; j++) {
      allocated[j] = cache.Allocate(&a, 0);
    }
    for (uptr j = 0; j < kNumAllocs; j++) {
      cache.Deallocate(&a, 0, allocated[j]);
    }
    cache.Drain(&a);
    uptr total_allocated = a.TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a.TestOnlyUnmap();
}

#if SANITIZER_WORDSIZE == 64
TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
  TestSizeClassAllocatorLocalCache<
      SizeClassAllocatorLocalCache<Allocator64> >();
}
#endif

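// InternalAlloc/InternalFree are the runtime's own heap interface, used for
// the sanitizers' internal bookkeeping rather than for instrumented user
// allocations.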
TEST(Allocator, Basic) {
  char *p = (char*)InternalAlloc(10);
  EXPECT_NE(p, (char*)0);
  char *p2 = (char*)InternalAlloc(20);
  EXPECT_NE(p2, (char*)0);
  EXPECT_NE(p2, p);
  InternalFree(p);
  InternalFree(p2);
}

TEST(Allocator, Stress) {
  const int kCount = 1000;
  char *ptrs[kCount];
  unsigned rnd = 42;
  for (int i = 0; i < kCount; i++) {
    uptr sz = rand_r(&rnd) % 1000;
    char *p = (char*)InternalAlloc(sz);
    EXPECT_NE(p, (char*)0);
    ptrs[i] = p;
  }
  for (int i = 0; i < kCount; i++) {
    InternalFree(ptrs[i]);
  }
}

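// InternalScopedBuffer<T> is an RAII array of T: size() reports the size in
// bytes, data() the storage, and the memory is released when the object goes
// out of scope (the inner block below scopes int_buf independently).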
TEST(Allocator, ScopedBuffer) {
  const int kSize = 512;
  {
    InternalScopedBuffer<int> int_buf(kSize);
    EXPECT_EQ(sizeof(int) * kSize, int_buf.size());  // NOLINT
  }
  InternalScopedBuffer<char> char_buf(kSize);
  EXPECT_EQ(sizeof(char) * kSize, char_buf.size());  // NOLINT
  memset(char_buf.data(), 'c', kSize);
  for (int i = 0; i < kSize; i++) {
    EXPECT_EQ('c', char_buf[i]);
  }
}
379