/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "HeapWalker.h"
#include "LeakFolding.h"

#include <gtest/gtest.h>
#include <ScopedDisableMalloc.h>
#include "Allocator.h"

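// Fixture: each test gets a private Heap (backing allocator::vector and the
// other Allocator.h containers) and runs with malloc disabled through
// ScopedDisableMallocTimeout. TearDown() checks that every heap allocation
// was freed and, unless the test already failed, that the malloc-disable
// window did not time out.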
class LeakFoldingTest : public ::testing::Test {
 public:
  LeakFoldingTest() : disable_malloc_(), heap_() {}

  void TearDown() override {
    ASSERT_TRUE(heap_.empty());
    if (!HasFailure()) {
      ASSERT_FALSE(disable_malloc_.timed_out());
    }
  }

 protected:
  ScopedDisableMallocTimeout disable_malloc_;
  Heap heap_;
};

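// Helpers: treat a stack array as a simulated heap allocation and register
// its [begin, end) address range with the HeapWalker.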
#define buffer_begin(buffer) reinterpret_cast<uintptr_t>(&buffer[0])
#define buffer_end(buffer) (reinterpret_cast<uintptr_t>(&buffer[0]) + sizeof(buffer))
#define ALLOCATION(heap_walker, buffer) \
  ASSERT_TRUE(heap_walker.Allocation(buffer_begin(buffer), buffer_end(buffer)))

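// A single allocation that references nothing folds to one leak with no
// referenced allocations.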
TEST_F(LeakFoldingTest, one) {
  void* buffer1[1] = {nullptr};

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(1U, num_leaks);
  EXPECT_EQ(sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(0U, leaked[0].referenced_count);
  EXPECT_EQ(0U, leaked[0].referenced_size);
}

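// Two allocations with no references between them fold to two independent
// leaks.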
TEST_F(LeakFoldingTest, two) {
  void* buffer1[1] = {nullptr};
  void* buffer2[1] = {nullptr};

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(2U, num_leaks);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(2U, leaked.size());
  EXPECT_EQ(0U, leaked[0].referenced_count);
  EXPECT_EQ(0U, leaked[0].referenced_size);
  EXPECT_EQ(0U, leaked[1].referenced_count);
  EXPECT_EQ(0U, leaked[1].referenced_size);
}

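// buffer1 points to buffer2, so buffer1 dominates it: only buffer1 is
// reported, with buffer2 folded into its referenced count and size.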
TEST_F(LeakFoldingTest, dominator) {
  void* buffer1[1];
  void* buffer2[1] = {nullptr};

  buffer1[0] = buffer2;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(2U, num_leaks);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(1U, leaked[0].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[0].referenced_size);
}

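// buffer2 and buffer3 form a cycle that is reachable only through buffer1,
// so buffer1 dominates the whole graph and is the only leak reported.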
TEST_F(LeakFoldingTest, cycle) {
  void* buffer1[1];
  void* buffer2[1];
  void* buffer3[1];

  buffer1[0] = buffer2;
  buffer2[0] = buffer3;
  buffer3[0] = buffer2;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(3U, num_leaks);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(2U, leaked[0].referenced_count);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked[0].referenced_size);
}

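// buffer1 and buffer2 form a cycle that also references buffer3. Neither
// cycle member dominates the other, so both are reported, and each counts
// the other cycle member plus buffer3 as referenced.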
TEST_F(LeakFoldingTest, dominator_cycle) {
  void* buffer1[2] = {nullptr, nullptr};
  void* buffer2[2];
  void* buffer3[1] = {nullptr};

  buffer1[0] = &buffer2;
  buffer2[0] = &buffer1;
  buffer2[1] = &buffer3;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(3U, num_leaks);
  EXPECT_EQ(5 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(2U, leaked.size());

  EXPECT_EQ(2U, leaked[0].referenced_count);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(2U, leaked[1].referenced_count);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked[1].referenced_size);
}

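// Two disjoint cycles, {buffer3, buffer4} and {buffer5, buffer6}, each
// entered through its own head (buffer1 and buffer2). Only the two heads
// are reported; each folds in the two members of its cycle.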
TEST_F(LeakFoldingTest, two_cycles) {
  void* buffer1[1];
  void* buffer2[1];
  void* buffer3[1];
  void* buffer4[1];
  void* buffer5[1];
  void* buffer6[1];

  buffer1[0] = buffer3;
  buffer2[0] = buffer5;
  buffer3[0] = buffer4;
  buffer4[0] = buffer3;
  buffer5[0] = buffer6;
  buffer6[0] = buffer5;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);
  ALLOCATION(heap_walker, buffer5);
  ALLOCATION(heap_walker, buffer6);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(6U, num_leaks);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(2U, leaked.size());
  EXPECT_EQ(2U, leaked[0].referenced_count);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(2U, leaked[1].referenced_count);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked[1].referenced_size);
}

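// Two disjoint two-element cycles with no external entry point. No member
// dominates its cycle, so all four allocations are reported, each
// referencing only its cycle partner.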
TEST_F(LeakFoldingTest, two_dominator_cycles) {
  void* buffer1[1];
  void* buffer2[1];
  void* buffer3[1];
  void* buffer4[1];

  buffer1[0] = buffer2;
  buffer2[0] = buffer1;
  buffer3[0] = buffer4;
  buffer4[0] = buffer3;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(4U, num_leaks);
  EXPECT_EQ(4 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(4U, leaked.size());
  EXPECT_EQ(1U, leaked[0].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(1U, leaked[1].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[1].referenced_size);
  EXPECT_EQ(1U, leaked[2].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[2].referenced_size);
  EXPECT_EQ(1U, leaked[3].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[3].referenced_size);
}

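// A cycle of n single-pointer allocations with no external entry point:
// every member is reported as a leak, and each references the other n - 1
// members of the cycle.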
TEST_F(LeakFoldingTest, giant_dominator_cycle) {
  const size_t n = 1000;
  void* buffer[n];

  HeapWalker heap_walker(heap_);

  for (size_t i = 0; i < n; i++) {
    ASSERT_TRUE(heap_walker.Allocation(reinterpret_cast<uintptr_t>(&buffer[i]),
        reinterpret_cast<uintptr_t>(&buffer[i+1])));
  }

  for (size_t i = 0; i < n - 1; i++) {
    buffer[i] = &buffer[i+1];
  }
  buffer[n - 1] = &buffer[0];

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(n, num_leaks);
  EXPECT_EQ(n * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(n, leaked.size());
  EXPECT_EQ(n - 1, leaked[0].referenced_count);
  EXPECT_EQ((n - 1) * sizeof(uintptr_t), leaked[0].referenced_size);
}

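// The same n-element cycle, but entered through buffer1: buffer1 dominates
// the entire cycle and is the only leak reported.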
TEST_F(LeakFoldingTest, giant_cycle) {
  const size_t n = 1000;
  void* buffer[n];
  void* buffer1[1];

  HeapWalker heap_walker(heap_);

  for (size_t i = 0; i < n - 1; i++) {
    buffer[i] = &buffer[i+1];
  }
  buffer[n - 1] = &buffer[0];

  buffer1[0] = &buffer[0];

  for (size_t i = 0; i < n; i++) {
    ASSERT_TRUE(heap_walker.Allocation(reinterpret_cast<uintptr_t>(&buffer[i]),
        reinterpret_cast<uintptr_t>(&buffer[i+1])));
  }

  ALLOCATION(heap_walker, buffer1);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(n + 1, num_leaks);
  EXPECT_EQ((n + 1) * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(n, leaked[0].referenced_count);
  EXPECT_EQ(n * sizeof(uintptr_t), leaked[0].referenced_size);
}

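// Diamond-shaped graph (see the diagram in the test body): buffer1 reaches
// buffer4 along two paths, through buffer2 and through buffer3, so buffer1
// dominates all three and is the only leak reported.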
TEST_F(LeakFoldingTest, multipath) {
  void* buffer1[2];
  void* buffer2[1];
  void* buffer3[1];
  void* buffer4[1] = {nullptr};

  //    1
  //   / \
  //  v   v
  //  2   3
  //   \ /
  //    v
  //    4

  buffer1[0] = &buffer2;
  buffer1[1] = &buffer3;
  buffer2[0] = &buffer4;
  buffer3[0] = &buffer4;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(4U, num_leaks);
  EXPECT_EQ(5 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(3U, leaked[0].referenced_count);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked[0].referenced_size);
}

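// Strongly connected graph of four allocations (see the diagram in the test
// body): every member can reach every other, so none dominates, and all
// four are reported, each referencing the other three.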
TEST_F(LeakFoldingTest, multicycle) {
  void* buffer1[2]{};
  void* buffer2[2]{};
  void* buffer3[2]{};
  void* buffer4[2]{};

  //    1
  //   / ^
  //  v   \
  //  2 -> 3
  //   \   ^
  //    v /
  //     4

  buffer1[0] = &buffer2;
  buffer2[0] = &buffer3;
  buffer2[1] = &buffer4;
  buffer3[0] = &buffer1;
  buffer4[0] = &buffer3;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(4U, num_leaks);
  EXPECT_EQ(8 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(4U, leaked.size());
  EXPECT_EQ(3U, leaked[0].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(3U, leaked[1].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[1].referenced_size);
  EXPECT_EQ(3U, leaked[2].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[2].referenced_size);
  EXPECT_EQ(3U, leaked[3].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[3].referenced_size);
}