/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

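// Tests for LeakFolding, which takes the leaked allocations found by a
// HeapWalker pass and folds leaks that are only referenced by other leaks
// into their dominating leak, reporting the number and size of the folded
// allocations alongside each leak that survives folding.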
#include "LeakFolding.h"
#include "HeapWalker.h"

#include <ScopedDisableMalloc.h>
#include <gtest/gtest.h>
#include "Allocator.h"

namespace android {

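// Fixture that keeps malloc disabled (with a timeout) for the duration of
// each test and provides a dedicated Allocator heap for the folding data
// structures. TearDown verifies that all heap allocations were returned and,
// if the test otherwise passed, that the malloc-disable timeout never fired.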
class LeakFoldingTest : public ::testing::Test {
 public:
  LeakFoldingTest() : disable_malloc_(), heap_() {}

  void TearDown() {
    ASSERT_TRUE(heap_.empty());
    if (!HasFailure()) {
      ASSERT_FALSE(disable_malloc_.timed_out());
    }
  }

 protected:
  ScopedDisableMallocTimeout disable_malloc_;
  Heap heap_;
};

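// Helpers that register an on-stack array with the HeapWalker as if it were
// a live heap allocation, letting each test build its reference graph out of
// local buffers of pointers.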
#define buffer_begin(buffer) reinterpret_cast<uintptr_t>(&(buffer)[0])
#define buffer_end(buffer) (reinterpret_cast<uintptr_t>(&(buffer)[0]) + sizeof(buffer))
#define ALLOCATION(heap_walker, buffer) \
  ASSERT_TRUE((heap_walker).Allocation(buffer_begin(buffer), buffer_end(buffer)))

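// A single allocation with no outgoing references folds to one leak that
// references nothing.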
TEST_F(LeakFoldingTest, one) {
  void* buffer1[1] = {nullptr};

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(1U, num_leaks);
  EXPECT_EQ(sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(0U, leaked[0].referenced_count);
  EXPECT_EQ(0U, leaked[0].referenced_size);
}

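// Two unrelated allocations fold to two independent leaks.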
TEST_F(LeakFoldingTest, two) {
  void* buffer1[1] = {nullptr};
  void* buffer2[1] = {nullptr};

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(2U, num_leaks);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(2U, leaked.size());
  EXPECT_EQ(0U, leaked[0].referenced_count);
  EXPECT_EQ(0U, leaked[0].referenced_size);
  EXPECT_EQ(0U, leaked[1].referenced_count);
  EXPECT_EQ(0U, leaked[1].referenced_size);
}

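// buffer1 -> buffer2: only the dominating leak buffer1 is reported; buffer2
// is folded into its referenced count and size.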
TEST_F(LeakFoldingTest, dominator) {
  void* buffer1[1];
  void* buffer2[1] = {nullptr};

  buffer1[0] = buffer2;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(2U, num_leaks);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(1U, leaked[0].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[0].referenced_size);
}

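// buffer1 -> buffer2 <-> buffer3: the two-node cycle is folded into its
// dominator buffer1, leaving a single reported leak that references two
// allocations.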
TEST_F(LeakFoldingTest, cycle) {
  void* buffer1[1];
  void* buffer2[1];
  void* buffer3[1];

  buffer1[0] = buffer2;
  buffer2[0] = buffer3;
  buffer3[0] = buffer2;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(3U, num_leaks);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(2U, leaked[0].referenced_count);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked[0].referenced_size);
}

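// buffer1 <-> buffer2 -> buffer3: when the dominating allocations themselves
// form a cycle, each member of the cycle is reported, referencing the other
// member plus buffer3.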
TEST_F(LeakFoldingTest, dominator_cycle) {
  void* buffer1[2] = {nullptr, nullptr};
  void* buffer2[2];
  void* buffer3[1] = {nullptr};

  buffer1[0] = &buffer2;
  buffer2[0] = &buffer1;
  buffer2[1] = &buffer3;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(3U, num_leaks);
  EXPECT_EQ(5 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(2U, leaked.size());

  EXPECT_EQ(2U, leaked[0].referenced_count);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(2U, leaked[1].referenced_count);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked[1].referenced_size);
}

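// Two roots, each dominating its own two-node cycle: only the two roots are
// reported, each folding in the two allocations of its cycle.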
TEST_F(LeakFoldingTest, two_cycles) {
  void* buffer1[1];
  void* buffer2[1];
  void* buffer3[1];
  void* buffer4[1];
  void* buffer5[1];
  void* buffer6[1];

  buffer1[0] = buffer3;
  buffer2[0] = buffer5;
  buffer3[0] = buffer4;
  buffer4[0] = buffer3;
  buffer5[0] = buffer6;
  buffer6[0] = buffer5;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);
  ALLOCATION(heap_walker, buffer5);
  ALLOCATION(heap_walker, buffer6);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(6U, num_leaks);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(2U, leaked.size());
  EXPECT_EQ(2U, leaked[0].referenced_count);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(2U, leaked[1].referenced_count);
  EXPECT_EQ(2 * sizeof(uintptr_t), leaked[1].referenced_size);
}

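// Two disjoint two-node cycles with no external referent: all four
// allocations are reported, each referencing its cycle partner.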
TEST_F(LeakFoldingTest, two_dominator_cycles) {
  void* buffer1[1];
  void* buffer2[1];
  void* buffer3[1];
  void* buffer4[1];

  buffer1[0] = buffer2;
  buffer2[0] = buffer1;
  buffer3[0] = buffer4;
  buffer4[0] = buffer3;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(4U, num_leaks);
  EXPECT_EQ(4 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(4U, leaked.size());
  EXPECT_EQ(1U, leaked[0].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(1U, leaked[1].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[1].referenced_size);
  EXPECT_EQ(1U, leaked[2].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[2].referenced_size);
  EXPECT_EQ(1U, leaked[3].referenced_count);
  EXPECT_EQ(sizeof(uintptr_t), leaked[3].referenced_size);
}

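// One 1000-node cycle with no external referent: every member is reported,
// each folding in the remaining n - 1 allocations.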
TEST_F(LeakFoldingTest, giant_dominator_cycle) {
  const size_t n = 1000;
  void* buffer[n];

  HeapWalker heap_walker(heap_);

  for (size_t i = 0; i < n; i++) {
    ASSERT_TRUE(heap_walker.Allocation(reinterpret_cast<uintptr_t>(&buffer[i]),
                                       reinterpret_cast<uintptr_t>(&buffer[i + 1])));
  }

  for (size_t i = 0; i < n - 1; i++) {
    buffer[i] = &buffer[i + 1];
  }
  buffer[n - 1] = &buffer[0];

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(n, num_leaks);
  EXPECT_EQ(n * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(n, leaked.size());
  EXPECT_EQ(n - 1, leaked[0].referenced_count);
  EXPECT_EQ((n - 1) * sizeof(uintptr_t), leaked[0].referenced_size);
}

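// A 1000-node cycle referenced by a single outside allocation: only the
// dominator buffer1 is reported, with the whole cycle folded into it.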
TEST_F(LeakFoldingTest, giant_cycle) {
  const size_t n = 1000;
  void* buffer[n];
  void* buffer1[1];

  HeapWalker heap_walker(heap_);

  for (size_t i = 0; i < n - 1; i++) {
    buffer[i] = &buffer[i + 1];
  }
  buffer[n - 1] = &buffer[0];

  buffer1[0] = &buffer[0];

  for (size_t i = 0; i < n; i++) {
    ASSERT_TRUE(heap_walker.Allocation(reinterpret_cast<uintptr_t>(&buffer[i]),
                                       reinterpret_cast<uintptr_t>(&buffer[i + 1])));
  }

  ALLOCATION(heap_walker, buffer1);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(n + 1, num_leaks);
  EXPECT_EQ((n + 1) * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(n, leaked[0].referenced_count);
  EXPECT_EQ(n * sizeof(uintptr_t), leaked[0].referenced_size);
}

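// Diamond: buffer1 reaches buffer4 through both buffer2 and buffer3, so the
// shared target must be counted only once when folding into buffer1.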
TEST_F(LeakFoldingTest, multipath) {
  void* buffer1[2];
  void* buffer2[1];
  void* buffer3[1];
  void* buffer4[1] = {nullptr};

  //    1
  //   / \
  //  v   v
  //  2   3
  //   \ /
  //    v
  //    4

  buffer1[0] = &buffer2;
  buffer1[1] = &buffer3;
  buffer2[0] = &buffer4;
  buffer3[0] = &buffer4;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(4U, num_leaks);
  EXPECT_EQ(5 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(1U, leaked.size());
  EXPECT_EQ(3U, leaked[0].referenced_count);
  EXPECT_EQ(3 * sizeof(uintptr_t), leaked[0].referenced_size);
}

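// All four allocations form a single strongly connected component with no
// outside referent: each is reported as a leak referencing the other three.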
TEST_F(LeakFoldingTest, multicycle) {
  void* buffer1[2]{};
  void* buffer2[2]{};
  void* buffer3[2]{};
  void* buffer4[2]{};

  //    1
  //   / ^
  //  v   \
  //  2 -> 3
  //   \   ^
  //    v /
  //     4

  buffer1[0] = &buffer2;
  buffer2[0] = &buffer3;
  buffer2[1] = &buffer4;
  buffer3[0] = &buffer1;
  buffer4[0] = &buffer3;

  HeapWalker heap_walker(heap_);

  ALLOCATION(heap_walker, buffer1);
  ALLOCATION(heap_walker, buffer2);
  ALLOCATION(heap_walker, buffer3);
  ALLOCATION(heap_walker, buffer4);

  LeakFolding folding(heap_, heap_walker);

  ASSERT_TRUE(folding.FoldLeaks());

  allocator::vector<LeakFolding::Leak> leaked(heap_);
  size_t num_leaks = 0;
  size_t leaked_bytes = 0;
  ASSERT_TRUE(folding.Leaked(leaked, &num_leaks, &leaked_bytes));

  EXPECT_EQ(4U, num_leaks);
  EXPECT_EQ(8 * sizeof(uintptr_t), leaked_bytes);
  ASSERT_EQ(4U, leaked.size());
  EXPECT_EQ(3U, leaked[0].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[0].referenced_size);
  EXPECT_EQ(3U, leaked[1].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[1].referenced_size);
  EXPECT_EQ(3U, leaked[2].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[2].referenced_size);
  EXPECT_EQ(3U, leaked[3].referenced_count);
  EXPECT_EQ(6 * sizeof(uintptr_t), leaked[3].referenced_size);
}

}  // namespace android