// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <assert.h>
#include <stdint.h>
#include <string.h>
#include <map>

#include "base/compiler_specific.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "tools/android/heap_profiler/heap_profiler.h"

namespace {

class HeapProfilerTest : public testing::Test {
 public:
  virtual void SetUp() OVERRIDE { heap_profiler_init(&stats_); }

  virtual void TearDown() OVERRIDE {
    CheckAllocVsStacktraceConsistency();
    heap_profiler_cleanup();
  }

 protected:
  struct StackTrace {
    uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];
    size_t depth;
  };

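  // Builds a deterministic, synthetic stack trace of |depth| frames starting
  // at |base|, with frames spaced 0x10 apart.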
  StackTrace GenStackTrace(size_t depth, uintptr_t base) {
    assert(depth <= HEAP_PROFILER_MAX_DEPTH);
    StackTrace st;
    for (size_t i = 0; i < depth; ++i)
      st.frames[i] = base + i * 0x10UL;
    st.depth = depth;
    return st;
  }

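  // Looks up the alloc spanning [start, end] in |stats_| and verifies that its
  // stack trace and flags match the expected ones. Fails the test if no
  // matching alloc is found.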
  void ExpectAlloc(uintptr_t start,
                   uintptr_t end,
                   const StackTrace& st,
                   uint32_t flags) {
    for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
      const Alloc& alloc = stats_.allocs[i];
      if (alloc.start != start || alloc.end != end)
        continue;
      // Check that the stack trace matches.
      for (uint32_t j = 0; j < st.depth; ++j) {
        EXPECT_EQ(st.frames[j], alloc.st->frames[j])
            << "Stacktrace not matching @ depth " << j;
      }
      EXPECT_EQ(flags, alloc.flags);
      return;
    }

    FAIL() << "Alloc not found [" << std::hex << start << "," << end << "]";
  }

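  // Cross-checks the allocs table against the stack traces table: for every
  // live stack trace, |alloc_bytes| must equal the total size of the allocs
  // pointing to it, and the counters in |stats_| must match the number of
  // live entries actually found.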
  void CheckAllocVsStacktraceConsistency() {
    uint32_t allocs_seen = 0;
    uint32_t stack_traces_seen = 0;
    std::map<StacktraceEntry*, uintptr_t> stacktrace_bytes_by_alloc;

    for (uint32_t i = 0; i < stats_.max_allocs; ++i) {
      Alloc* alloc = &stats_.allocs[i];
      if (alloc->start == 0 && alloc->end == 0)
        continue;
      ++allocs_seen;
      stacktrace_bytes_by_alloc[alloc->st] += alloc->end - alloc->start + 1;
    }

    for (uint32_t i = 0; i < stats_.max_stack_traces; ++i) {
      StacktraceEntry* st = &stats_.stack_traces[i];
      if (st->alloc_bytes == 0)
        continue;
      ++stack_traces_seen;
      EXPECT_EQ(1, stacktrace_bytes_by_alloc.count(st));
      EXPECT_EQ(stacktrace_bytes_by_alloc[st], st->alloc_bytes);
    }

    EXPECT_EQ(allocs_seen, stats_.num_allocs);
    EXPECT_EQ(stack_traces_seen, stats_.num_stack_traces);
  }

  HeapStats stats_;
};

TEST_F(HeapProfilerTest, SimpleAlloc) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);

  EXPECT_EQ(2, stats_.num_allocs);
  EXPECT_EQ(1, stats_.num_stack_traces);
  EXPECT_EQ(1024 + 2048, stats_.total_alloc_bytes);
  ExpectAlloc(0x1000, 0x13ff, st1, 0);
  ExpectAlloc(0x2000, 0x27ff, st1, 0);
}

TEST_F(HeapProfilerTest, AllocMultipleStacks) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(4, 0x1000);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0);
  heap_profiler_alloc((void*)0x3000, 32, st1.frames, st1.depth, 0);

  EXPECT_EQ(3, stats_.num_allocs);
  EXPECT_EQ(2, stats_.num_stack_traces);
  EXPECT_EQ(1024 + 2048 + 32, stats_.total_alloc_bytes);
  ExpectAlloc(0x1000, 0x13ff, st1, 0);
  ExpectAlloc(0x2000, 0x27ff, st2, 0);
  ExpectAlloc(0x3000, 0x301f, st1, 0);
}

TEST_F(HeapProfilerTest, SimpleAllocAndFree) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  heap_profiler_free((void*)0x1000, 1024, NULL);

  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
}

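// Registering the same range twice (as an in-place realloc would) must not
// corrupt the bookkeeping; the consistency check in TearDown() verifies this.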
TEST_F(HeapProfilerTest, Realloc) {
  StackTrace st1 = GenStackTrace(8, 0);
  heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0, 32, st1.frames, st1.depth, 0);
}

TEST_F(HeapProfilerTest, AllocAndFreeMultipleStacks) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0);
  heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0);

  heap_profiler_free((void*)0x1000, 1024, NULL);
  heap_profiler_free((void*)0x3000, 32, NULL);

  EXPECT_EQ(2, stats_.num_allocs);
  EXPECT_EQ(2, stats_.num_stack_traces);
  EXPECT_EQ(2048 + 64, stats_.total_alloc_bytes);
  ExpectAlloc(0x2000, 0x27ff, st1, 0);
  ExpectAlloc(0x4000, 0x403f, st2, 0);
}

TEST_F(HeapProfilerTest, AllocAndFreeAll) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 2048, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x3000, 32, st2.frames, st2.depth, 0);
  heap_profiler_alloc((void*)0x4000, 64, st2.frames, st2.depth, 0);

  heap_profiler_free((void*)0x1000, 1024, NULL);
  heap_profiler_free((void*)0x2000, 2048, NULL);
  heap_profiler_free((void*)0x3000, 32, NULL);
  heap_profiler_free((void*)0x4000, 64, NULL);

  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
}

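// A free with size 0 is expected to drop the whole alloc registered at that
// address.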
TEST_F(HeapProfilerTest, AllocAndFreeWithZeroSize) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 2048, st2.frames, st2.depth, 0);

  heap_profiler_free((void*)0x1000, 0, NULL);

  EXPECT_EQ(1, stats_.num_allocs);
  EXPECT_EQ(1, stats_.num_stack_traces);
  EXPECT_EQ(2048, stats_.total_alloc_bytes);
}

TEST_F(HeapProfilerTest, AllocAndFreeContiguous) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);

  heap_profiler_free((void*)0x1000, 8192, NULL);

  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
}

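// A single free spanning a larger range must wipe out all the small allocs
// that fall inside it, leaving the one outside the range untouched.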
TEST_F(HeapProfilerTest, SparseAllocsOneLargeOuterFree) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);

  heap_profiler_alloc((void*)0x1010, 1, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x1400, 2, st2.frames, st2.depth, 0);
  heap_profiler_alloc((void*)0x1600, 5, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x9000, 4096, st2.frames, st2.depth, 0);

  heap_profiler_free((void*)0x0800, 8192, NULL);
  EXPECT_EQ(1, stats_.num_allocs);
  EXPECT_EQ(1, stats_.num_stack_traces);
  EXPECT_EQ(4096, stats_.total_alloc_bytes);
  ExpectAlloc(0x9000, 0x9fff, st2, 0);
}

TEST_F(HeapProfilerTest, SparseAllocsOneLargePartiallyOverlappingFree) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  StackTrace st3 = GenStackTrace(4, 0x2000);

  // This will be untouched.
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);

  // These will be partially freed in one shot, leaving a 64-byte margin at
  // each end of the freed range.
  heap_profiler_alloc((void*)0x2000, 128, st2.frames, st2.depth, 0);
  heap_profiler_alloc((void*)0x2400, 128, st2.frames, st2.depth, 0);
  heap_profiler_alloc((void*)0x2f80, 128, st2.frames, st2.depth, 0);

  // This will be untouched.
  heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0);

  heap_profiler_free((void*)0x2040, 4096 - 64 - 64, NULL);
  EXPECT_EQ(4, stats_.num_allocs);
  EXPECT_EQ(3, stats_.num_stack_traces);
  EXPECT_EQ(1024 + 64 + 64 + 1024, stats_.total_alloc_bytes);

  ExpectAlloc(0x1000, 0x13ff, st1, 0);
  ExpectAlloc(0x2000, 0x203f, st2, 0);
  ExpectAlloc(0x2fc0, 0x2fff, st2, 0);
  ExpectAlloc(0x3000, 0x33ff, st3, 0);
}

TEST_F(HeapProfilerTest, AllocAndFreeScattered) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x3000, 4096, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x4000, 4096, st1.frames, st1.depth, 0);

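  // The frees below are shifted by 2048 bytes (0x800) with respect to the
  // allocs above, so each 4k alloc is released in two halves by two
  // consecutive free calls.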
  heap_profiler_free((void*)0x800, 4096, NULL);
  EXPECT_EQ(4, stats_.num_allocs);
  EXPECT_EQ(2048 + 4096 + 4096 + 4096, stats_.total_alloc_bytes);

  heap_profiler_free((void*)0x1800, 4096, NULL);
  EXPECT_EQ(3, stats_.num_allocs);
  EXPECT_EQ(2048 + 4096 + 4096, stats_.total_alloc_bytes);

  heap_profiler_free((void*)0x2800, 4096, NULL);
  EXPECT_EQ(2, stats_.num_allocs);
  EXPECT_EQ(2048 + 4096, stats_.total_alloc_bytes);

  heap_profiler_free((void*)0x3800, 4096, NULL);
  EXPECT_EQ(1, stats_.num_allocs);
  EXPECT_EQ(2048, stats_.total_alloc_bytes);

  heap_profiler_free((void*)0x4800, 4096, NULL);
  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
}

TEST_F(HeapProfilerTest, AllocAndOverFreeContiguous) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);

  heap_profiler_free((void*)0, 16834, NULL);

  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
}

TEST_F(HeapProfilerTest, AllocContiguousAndPunchHole) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 4096, st2.frames, st2.depth, 0);

  // Punch a 4k hole in the middle of the two contiguous 4k allocs.
  heap_profiler_free((void*)0x1800, 4096, NULL);

  EXPECT_EQ(2, stats_.num_allocs);
  EXPECT_EQ(2, stats_.num_stack_traces);
  EXPECT_EQ(4096, stats_.total_alloc_bytes);
}

TEST_F(HeapProfilerTest, AllocAndPartialFree) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(6, 0x1000);
  StackTrace st3 = GenStackTrace(7, 0x2000);
  StackTrace st4 = GenStackTrace(9, 0x3000);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  heap_profiler_alloc((void*)0x2000, 1024, st2.frames, st2.depth, 0);
  heap_profiler_alloc((void*)0x3000, 1024, st3.frames, st3.depth, 0);
  heap_profiler_alloc((void*)0x4000, 1024, st4.frames, st4.depth, 0);

  heap_profiler_free((void*)0x1000, 128, NULL);  // Shrink left by 128B.
  heap_profiler_free((void*)0x2380, 128, NULL);  // Shrink right by 128B.
  heap_profiler_free((void*)0x3100, 512, NULL);  // 512B hole in the middle.
  heap_profiler_free((void*)0x4000, 512, NULL);  // Free up the 4th alloc...
  heap_profiler_free((void*)0x4200, 512, NULL);  // ...but do it in two halves.

  EXPECT_EQ(4, stats_.num_allocs);  // Allocs 1 and 2, plus the two pieces of
                                    // alloc 3 around the hole.
  EXPECT_EQ(3, stats_.num_stack_traces);  // st4 should be gone.
  EXPECT_EQ(896 + 896 + 512, stats_.total_alloc_bytes);
}

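// Interleaves pseudo-random allocs and frees and re-checks the allocs vs.
// stack traces invariants after every single operation.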
TEST_F(HeapProfilerTest, RandomIndividualAllocAndFrees) {
  static const size_t NUM_ST = 128;
  static const size_t NUM_OPS = 1000;

  StackTrace st[NUM_ST];
  for (uint32_t i = 0; i < NUM_ST; ++i)
    st[i] = GenStackTrace((i % 10) + 2, i * 128);

  for (size_t i = 0; i < NUM_OPS; ++i) {
    uintptr_t start = ((i + 7) << 8) & (0xffffff);
    size_t size = (start >> 16) & 0x0fff;
    if (i & 1) {
      StackTrace* s = &st[start % NUM_ST];
      heap_profiler_alloc((void*)start, size, s->frames, s->depth, 0);
    } else {
      heap_profiler_free((void*)start, size, NULL);
    }
    CheckAllocVsStacktraceConsistency();
  }
}

TEST_F(HeapProfilerTest, RandomAllocAndFreesBatches) {
  static const size_t NUM_ST = 128;
  static const size_t NUM_ALLOCS = 100;

  StackTrace st[NUM_ST];
  for (size_t i = 0; i < NUM_ST; ++i)
    st[i] = GenStackTrace((i % 10) + 2, i * NUM_ST);

  for (int repeat = 0; repeat < 5; ++repeat) {
    for (size_t i = 0; i < NUM_ALLOCS; ++i) {
      StackTrace* s = &st[i % NUM_ST];
      heap_profiler_alloc(
          (void*)(i * 4096), ((i + 1) * 32) % 4097, s->frames, s->depth, 0);
      CheckAllocVsStacktraceConsistency();
    }

    for (size_t i = 0; i < NUM_ALLOCS; ++i) {
      heap_profiler_free((void*)(i * 1024), ((i + 1) * 64) % 16000, NULL);
      CheckAllocVsStacktraceConsistency();
    }
  }
}

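// Passing more frames than HEAP_PROFILER_MAX_DEPTH must not overflow: only
// the first HEAP_PROFILER_MAX_DEPTH frames are expected to be recorded.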
TEST_F(HeapProfilerTest, UnwindStackTooLargeShouldSaturate) {
  StackTrace st1 = GenStackTrace(HEAP_PROFILER_MAX_DEPTH, 0x0);
  uintptr_t many_frames[100] = {};
  memcpy(many_frames, st1.frames, sizeof(uintptr_t) * st1.depth);
  heap_profiler_alloc((void*)0x1000, 1024, many_frames, 100, 0);
  ExpectAlloc(0x1000, 0x13ff, st1, 0);
}

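// An alloc with no stack frames must be ignored, not crash.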
TEST_F(HeapProfilerTest, NoUnwindShouldNotCrashButNoop) {
  heap_profiler_alloc((void*)0x1000, 1024, NULL, 0, 0);
  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
}

TEST_F(HeapProfilerTest, FreeNonExisting) {
  StackTrace st1 = GenStackTrace(5, 0x0);
  heap_profiler_free((void*)0x1000, 1024, NULL);
  heap_profiler_free((void*)0x1400, 1024, NULL);
  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
  heap_profiler_alloc((void*)0x1000, 1024, st1.frames, st1.depth, 0);
  EXPECT_EQ(1, stats_.num_allocs);
  EXPECT_EQ(1024, stats_.total_alloc_bytes);
}

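// Flags passed to heap_profiler_alloc() must survive partial frees and be
// returned by heap_profiler_free().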
TEST_F(HeapProfilerTest, FlagsConsistency) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  uint32_t flags = 0;
  heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 42);
  heap_profiler_alloc((void*)0x2000, 4096, st1.frames, st1.depth, 142);

  ExpectAlloc(0x1000, 0x1fff, st1, 42);
  ExpectAlloc(0x2000, 0x2fff, st1, 142);

  // Punch a 4k hole in the middle of the two contiguous 4k allocs.
  heap_profiler_free((void*)0x1800, 4096, NULL);

  ExpectAlloc(0x1000, 0x17ff, st1, 42);
  heap_profiler_free((void*)0x1000, 2048, &flags);
  EXPECT_EQ(42, flags);

  ExpectAlloc(0x2800, 0x2fff, st1, 142);
  heap_profiler_free((void*)0x2800, 2048, &flags);
  EXPECT_EQ(142, flags);
}

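// Overflows the internal tables on purpose and checks that the profiler
// saturates gracefully, keeping allocs and stack traces consistent.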
TEST_F(HeapProfilerTest, BeConsistentOnOOM) {
  static const size_t NUM_ALLOCS = 512 * 1024;
  uintptr_t frames[1];

  for (uintptr_t i = 0; i < NUM_ALLOCS; ++i) {
    frames[0] = i;
    heap_profiler_alloc((void*)(i * 32), 32, frames, 1, 0);
  }

  CheckAllocVsStacktraceConsistency();
  // Check that we're saturating, otherwise this entire test is pointless.
  EXPECT_LT(stats_.num_allocs, NUM_ALLOCS);
  EXPECT_LT(stats_.num_stack_traces, NUM_ALLOCS);

  for (uintptr_t i = 0; i < NUM_ALLOCS; ++i)
    heap_profiler_free((void*)(i * 32), 32, NULL);

  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
  EXPECT_EQ(0, stats_.num_stack_traces);
}

#ifdef __LP64__
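// Exercises addresses above 4 GB and near the top of the 64-bit address space.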
TEST_F(HeapProfilerTest, Test64Bit) {
  StackTrace st1 = GenStackTrace(8, 0x0);
  StackTrace st2 = GenStackTrace(10, 0x7fffffff70000000L);
  StackTrace st3 = GenStackTrace(10, 0xffffffff70000000L);
  heap_profiler_alloc((void*)0x1000, 4096, st1.frames, st1.depth, 0);
  heap_profiler_alloc(
      (void*)0x7ffffffffffff000L, 4096, st2.frames, st2.depth, 0);
  heap_profiler_alloc(
      (void*)0xfffffffffffff000L, 4096, st3.frames, st3.depth, 0);
  EXPECT_EQ(3, stats_.num_allocs);
  EXPECT_EQ(3, stats_.num_stack_traces);
  EXPECT_EQ(4096 + 4096 + 4096, stats_.total_alloc_bytes);

  heap_profiler_free((void*)0x1000, 4096, NULL);
  EXPECT_EQ(2, stats_.num_allocs);
  EXPECT_EQ(2, stats_.num_stack_traces);
  EXPECT_EQ(4096 + 4096, stats_.total_alloc_bytes);

  heap_profiler_free((void*)0x7ffffffffffff000L, 4096, NULL);
  EXPECT_EQ(1, stats_.num_allocs);
  EXPECT_EQ(1, stats_.num_stack_traces);
  EXPECT_EQ(4096, stats_.total_alloc_bytes);

  heap_profiler_free((void*)0xfffffffffffff000L, 4096, NULL);
  EXPECT_EQ(0, stats_.num_allocs);
  EXPECT_EQ(0, stats_.num_stack_traces);
  EXPECT_EQ(0, stats_.total_alloc_bytes);
}
#endif

}  // namespace