1#!/usr/bin/python2
2# Copyright 2017 The Chromium OS Authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""unittest for utils.py
7"""
8
9from __future__ import absolute_import
10from __future__ import division
11from __future__ import print_function
12
13import json
14import os
15import shutil
16import tempfile
17import time
18import unittest
19
20import common
21from autotest_lib.client.bin.result_tools import result_info
22from autotest_lib.client.bin.result_tools import shrink_file_throttler
23from autotest_lib.client.bin.result_tools import throttler_lib
24from autotest_lib.client.bin.result_tools import utils as result_utils
25from autotest_lib.client.bin.result_tools import utils_lib
26from autotest_lib.client.bin.result_tools import view as result_view
27from autotest_lib.client.bin.result_tools import unittest_lib
28from six.moves import range
29
# Unit file size used by unittest_lib.create_file; all of the expected
# summaries below express sizes as multiples of this unit.
SIZE = unittest_lib.SIZE

# Sizes used for testing throttling
LARGE_SIZE = 1 * 1024 * 1024
SMALL_SIZE = 1 * 1024
35
# Expected output of ResultInfo.build_from_path for the tree built by
# GetDirSummaryTest.setUp: file1, folder1/{file2, file3, symlink -> folder2}
# and folder2/file2. The symlink contributes 0 bytes and an empty DIRS list.
EXPECTED_SUMMARY = {
        '': {utils_lib.ORIGINAL_SIZE_BYTES: 4 * SIZE,
             utils_lib.DIRS: [
                     {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                     {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                                 utils_lib.DIRS: [
                                  {'file2': {
                                      utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                                  {'file3': {
                                      utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                                  {'symlink': {
                                      utils_lib.ORIGINAL_SIZE_BYTES: 0,
                                      utils_lib.DIRS: []}}]}},
                     {'folder2': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                                 utils_lib.DIRS:
                                     [{'file2':
                                        {utils_lib.ORIGINAL_SIZE_BYTES:
                                         SIZE}}],
                                }}]}}
55
# Client-side summary #1, the earliest snapshot (MergeSummaryTest.setUp writes
# it first and sleeps before writing summary #2). `file4` is recorded as
# already trimmed from 2 * SIZE down to SIZE.
SUMMARY_1 = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 6 * SIZE,
       utils_lib.TRIMMED_SIZE_BYTES: 5 * SIZE,
       utils_lib.DIRS: [
         {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                   utils_lib.TRIMMED_SIZE_BYTES: SIZE}},
         {'folder_not_overwritten':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}
              ]}},
          {'file_to_be_overwritten': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
        ]
      }
  }
73
# Client-side summary #2, written with a later timestamp than SUMMARY_1 so the
# merge logic treats it as the newer snapshot.
SUMMARY_2 = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 27 * SIZE,
       utils_lib.DIRS: [
         # `file1` exists and has the same size.
         {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         # Change the size of `file2` to make sure summary merge works.
         {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE}},
         # `file3` is new.
         {'file3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         # `file4` is old but throttled earlier.
         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         # Add a new sub-directory.
         {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                     utils_lib.TRIMMED_SIZE_BYTES: SIZE,
                     utils_lib.DIRS: [
                         # Add a file being trimmed.
                         {'file4': {
                           utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                           utils_lib.TRIMMED_SIZE_BYTES: SIZE}
                         }]
                     }},
          # Add a file whose name collides with the previous summary.
          {'folder_not_overwritten': {
            utils_lib.ORIGINAL_SIZE_BYTES: 100 * SIZE}},
          # Add a directory whose name collides with the previous summary.
          {'file_to_be_overwritten':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
            }},
          # Folder was collected, not missing from the final result folder.
          {'folder_tobe_deleted':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file_tobe_deleted': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
            }},
        ]
      }
  }
113
# Client-side summary #3, stored inside a subdirectory
# (folder3/folder31/dir_summary_3.json) to exercise nested summary merging.
SUMMARY_3 = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
       utils_lib.DIRS: [
         {'file10': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         ]
       }
  }

# On-disk byte sizes of the json.dump output of the three summaries above;
# EXPECTED_MERGED_SUMMARY counts the summary json files themselves.
SUMMARY_1_SIZE = 224
SUMMARY_2_SIZE = 388
SUMMARY_3_SIZE = 48
125
# The final result dir has an extra folder and file, also with `file3` removed
# to test the case that client files are removed on the server side.
EXPECTED_MERGED_SUMMARY = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES:
           40 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
       utils_lib.TRIMMED_SIZE_BYTES:
           19 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
       # Size collected is SIZE bytes more than total size as an old `file2` of
       # SIZE bytes is overwritten by a newer file.
       utils_lib.COLLECTED_SIZE_BYTES:
           22 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
       utils_lib.DIRS: [
         {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                    utils_lib.COLLECTED_SIZE_BYTES: 3 * SIZE}},
         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                    utils_lib.TRIMMED_SIZE_BYTES: SIZE}},
         {'folder_not_overwritten':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
            }},
         {'file_to_be_overwritten':
           {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
            utils_lib.COLLECTED_SIZE_BYTES: 2 * SIZE,
            utils_lib.TRIMMED_SIZE_BYTES: SIZE,
            utils_lib.DIRS: [
              {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
           }},
         {'file3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                     utils_lib.TRIMMED_SIZE_BYTES: SIZE,
                     utils_lib.DIRS: [
                         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                                   utils_lib.TRIMMED_SIZE_BYTES: SIZE}
                         }]
                     }},
         # Listed in SUMMARY_2 but never created on disk by setUp, so the
         # merged view reports it as fully trimmed (0 bytes remaining).
         {'folder_tobe_deleted':
           {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
            utils_lib.COLLECTED_SIZE_BYTES: SIZE,
            utils_lib.TRIMMED_SIZE_BYTES: 0,
            utils_lib.DIRS: [
              {'file_tobe_deleted': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                                    utils_lib.COLLECTED_SIZE_BYTES: SIZE,
                                    utils_lib.TRIMMED_SIZE_BYTES: 0}}]
           }},
         {'folder3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE + SUMMARY_3_SIZE,
                     utils_lib.DIRS: [
                       {'folder31': {
                         utils_lib.ORIGINAL_SIZE_BYTES: SIZE + SUMMARY_3_SIZE,
                         utils_lib.DIRS: [
                             {'file10': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                             {'dir_summary_3.json': {
                               utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_3_SIZE}},
                            ]}},
                       ]
                     }},
         {'dir_summary_1.json': {
           utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_1_SIZE}},
         {'dir_summary_2.json': {
           utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_2_SIZE}},
         {'folder2': {utils_lib.ORIGINAL_SIZE_BYTES: 10 * SIZE,
                     utils_lib.DIRS: [
                         {'server_file': {
                           utils_lib.ORIGINAL_SIZE_BYTES: 10 * SIZE}
                         }]
                     }},
        ]
      }
  }
196
197
class GetDirSummaryTest(unittest.TestCase):
    """Test class for ResultInfo.build_from_path method"""

    def setUp(self):
        """Create the directory tree described by EXPECTED_SUMMARY.

        Layout: file1, folder1/{file2, file3, symlink -> folder2},
        folder2/file2.
        """
        self.test_dir = tempfile.mkdtemp()
        unittest_lib.create_file(os.path.join(self.test_dir, 'file1'))

        folder1 = os.path.join(self.test_dir, 'folder1')
        os.mkdir(folder1)
        for name in ('file2', 'file3'):
            unittest_lib.create_file(os.path.join(folder1, name))

        folder2 = os.path.join(self.test_dir, 'folder2')
        os.mkdir(folder2)
        unittest_lib.create_file(os.path.join(folder2, 'file2'))

        # folder1 also contains a symlink pointing back at folder2.
        os.symlink(folder2, os.path.join(folder1, 'symlink'))

    def tearDown(self):
        """Delete the temporary test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def test_BuildFromPath(self):
        """Test method ResultInfo.build_from_path."""
        built = result_info.ResultInfo.build_from_path(self.test_dir)
        self.assertEqual(EXPECTED_SUMMARY, built)
229
230
class MergeSummaryTest(unittest.TestCase):
    """Test class for merge_summaries method"""

    def setUp(self):
        """Build a result directory matching EXPECTED_MERGED_SUMMARY."""
        self.test_dir = tempfile.mkdtemp() + '/'

        # Top-level files of the final (server-side) result directory.
        unittest_lib.create_file(os.path.join(self.test_dir, 'file1'))
        unittest_lib.create_file(os.path.join(self.test_dir, 'file2'),
                                 2 * SIZE)
        unittest_lib.create_file(os.path.join(self.test_dir, 'file3'), SIZE)
        unittest_lib.create_file(os.path.join(self.test_dir, 'file4'), SIZE)

        folder1 = os.path.join(self.test_dir, 'folder1')
        os.mkdir(folder1)
        unittest_lib.create_file(os.path.join(folder1, 'file4'), SIZE)

        # Used to test summary in subdirectory.
        folder31 = os.path.join(self.test_dir, 'folder3', 'folder31')
        os.makedirs(folder31)
        unittest_lib.create_file(os.path.join(folder31, 'file10'), SIZE)

        folder2 = os.path.join(self.test_dir, 'folder2')
        os.mkdir(folder2)
        unittest_lib.create_file(os.path.join(folder2, 'server_file'),
                                 10 * SIZE)

        # A directory that exists in both client summaries and on disk.
        folder_not_overwritten = os.path.join(
                self.test_dir, 'folder_not_overwritten')
        os.mkdir(folder_not_overwritten)
        unittest_lib.create_file(
                os.path.join(folder_not_overwritten, 'file1'))

        # A name that is a file in SUMMARY_1 but a directory on disk.
        file_to_be_overwritten = os.path.join(
                self.test_dir, 'file_to_be_overwritten')
        os.mkdir(file_to_be_overwritten)
        unittest_lib.create_file(
                os.path.join(file_to_be_overwritten, 'file1'))

        # Save the client-side summary files, sleeping 10ms between writes so
        # each one carries a strictly later timestamp than the previous one.
        self.summary_1 = os.path.join(self.test_dir, 'dir_summary_1.json')
        self.summary_2 = os.path.join(self.test_dir, 'dir_summary_2.json')
        self.summary_3 = os.path.join(self.test_dir, 'folder3', 'folder31',
                                      'dir_summary_3.json')
        summaries = [(self.summary_1, SUMMARY_1),
                     (self.summary_2, SUMMARY_2),
                     (self.summary_3, SUMMARY_3)]
        for index, (path, data) in enumerate(summaries):
            if index:
                time.sleep(0.01)
            with open(path, 'w') as f:
                json.dump(data, f)

    def tearDown(self):
        """Delete the temporary result directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def testMergeSummaries(self):
        """Test method merge_summaries."""
        collected_bytes, merged_summary, files = result_utils.merge_summaries(
                self.test_dir)

        self.assertEqual(EXPECTED_MERGED_SUMMARY, merged_summary)
        self.assertEqual(collected_bytes, 12 * SIZE)
        self.assertEqual(3, len(files))

    def testMergeSummariesFromNoHistory(self):
        """Test method merge_summaries can handle results with no existing
        summary.
        """
        for summary_file in (self.summary_1, self.summary_2, self.summary_3):
            os.remove(summary_file)
        client_collected_bytes, _, _ = result_utils.merge_summaries(
                self.test_dir)
        self.assertEqual(0, client_collected_bytes)

    def testBuildView(self):
        """Test build method in result_view module."""
        client_collected_bytes, summary, _ = result_utils.merge_summaries(
                self.test_dir)
        html_file = os.path.join(self.test_dir,
                                 result_view.DEFAULT_RESULT_SUMMARY_NAME)
        result_view.build(client_collected_bytes, summary, html_file)
        # The generated report should have real content, not be empty.
        self.assertGreater(os.stat(html_file).st_size, 1000)
321
322
# Not throttled: expected summary when _throttle_results runs with a limit
# larger than the total size, so every file keeps its original size.
EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
       utils_lib.DIRS: [
           {'files_to_dedupe': {
               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
               utils_lib.DIRS: [
                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                ]
            }},
           {'files_to_delete': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_shink': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_zip': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
        ]
       }
    }
358
# Size a shrunk file is reduced to by the shrink throttler.
SHRINK_SIZE = shrink_file_throttler.DEFAULT_FILE_SIZE_LIMIT_BYTE

# Expected summary after throttling with a limit that only requires shrinking:
# file.txt is trimmed to SHRINK_SIZE, everything else is untouched.
# NOTE: 'files_to_shink' (sic) matches the directory name created in
# ThrottleTest.setUp.
EXPECTED_THROTTLED_SUMMARY_WITH_SHRINK = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
       utils_lib.TRIMMED_SIZE_BYTES:
            2 * LARGE_SIZE + 5 * SMALL_SIZE + SHRINK_SIZE,
       utils_lib.DIRS: [
           {'files_to_dedupe': {
               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
               utils_lib.DIRS: [
                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                ]
            }},
           {'files_to_delete': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_shink': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE,
               utils_lib.DIRS: [
                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
                                 utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE}},
                ]
            }},
           {'files_to_zip': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
        ]
       }
    }
398
# Expected summary once deduping also kicks in: file_2.dmp and file_3.dmp are
# trimmed to 0 while file_0/file_1/file_4 are kept, on top of the shrink of
# file.txt from EXPECTED_THROTTLED_SUMMARY_WITH_SHRINK.
EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
       utils_lib.TRIMMED_SIZE_BYTES:
            2 * LARGE_SIZE + 3 * SMALL_SIZE + SHRINK_SIZE,
       utils_lib.DIRS: [
           {'files_to_dedupe': {
               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
               utils_lib.TRIMMED_SIZE_BYTES: 3 * SMALL_SIZE,
               utils_lib.DIRS: [
                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE,
                                   utils_lib.TRIMMED_SIZE_BYTES: 0}},
                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE,
                                   utils_lib.TRIMMED_SIZE_BYTES: 0}},
                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                ]
            }},
           {'files_to_delete': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_shink': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE,
               utils_lib.DIRS: [
                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
                                 utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE}},
                ]
            }},
           {'files_to_zip': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
        ]
       }
    }
440
441
class ThrottleTest(unittest.TestCase):
    """Test class for _throttle_results method"""

    def setUp(self):
        """Setup directory to match the file structure in MERGED_SUMMARY."""
        self.test_dir = tempfile.mkdtemp()

        # NOTE: 'files_to_shink' (sic) deliberately matches the name used in
        # the EXPECTED_THROTTLED_SUMMARY_* constants above.
        folder = os.path.join(self.test_dir, 'files_to_shink')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.txt')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_zip')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.xml')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_delete')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.png')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_dedupe')
        os.mkdir(folder)
        for i in range(5):
            # Sleep so each .dmp file gets a distinct mtime; presumably the
            # dedupe throttler picks which copies to keep by timestamp
            # ordering — TODO confirm against the throttler implementation.
            time.sleep(0.01)
            file1 = os.path.join(folder, 'file_%d.dmp' % i)
            unittest_lib.create_file(file1, SMALL_SIZE)

    def tearDown(self):
        """Cleanup the test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def testThrottleResults(self):
        """Test _throttle_results method."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        # Limit is passed as bytes // 1024 (i.e. in KiB). A limit well above
        # the total size throttles nothing.
        result_utils._throttle_results(summary, LARGE_SIZE * 10 // 1024)
        self.assertEqual(EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE, summary)

        # A limit just under the 3*LARGE_SIZE total triggers the shrink stage.
        result_utils._throttle_results(summary, LARGE_SIZE * 3 // 1024)
        self.assertEqual(EXPECTED_THROTTLED_SUMMARY_WITH_SHRINK, summary)

    def testThrottleResults_Dedupe(self):
        """Test _throttle_results method with dedupe triggered."""
        # Change AUTOTEST_LOG_PATTERN to protect file.xml from being compressed
        # before deduping kicks in.
        old_pattern = throttler_lib.AUTOTEST_LOG_PATTERN
        throttler_lib.AUTOTEST_LOG_PATTERN = '.*/file.xml'
        try:
            summary = result_info.ResultInfo.build_from_path(self.test_dir)
            result_utils._throttle_results(
                    summary, (2*LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE) // 1024)
            self.assertEqual(EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE, summary)
        finally:
            # Always restore the module-level pattern for the other tests.
            throttler_lib.AUTOTEST_LOG_PATTERN = old_pattern

    def testThrottleResults_Zip(self):
        """Test _throttle_results method with compression (zip) triggered."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        result_utils._throttle_results(
                summary, (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE) // 1024 + 2)
        self.assertEqual(
                3 * LARGE_SIZE + 5 * SMALL_SIZE, summary.original_size)

        # file.xml should have been replaced by a compressed file.xml.tgz.
        entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
        self.assertEqual(LARGE_SIZE, entry.original_size)
        self.assertTrue(LARGE_SIZE > entry.trimmed_size)

        # The compressed file size should be less than 2 KB.
        self.assertTrue(
                summary.trimmed_size <
                (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE + 2 * 1024))
        self.assertTrue(
                summary.trimmed_size >
                (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE))

    def testThrottleResults_Delete(self):
        """Test _throttle_results method with delete triggered."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        result_utils._throttle_results(
                summary, (3*SMALL_SIZE + SHRINK_SIZE) // 1024 + 2)

        # Confirm the original size is preserved.
        self.assertEqual(3 * LARGE_SIZE + 5 * SMALL_SIZE, summary.original_size)

        # Confirm the deduped, zipped and shrunk files are not deleted.
        # The compressed file is at least 512 bytes.
        self.assertTrue(
                3 * SMALL_SIZE + SHRINK_SIZE + 512 < summary.original_size)

        # Confirm the file to be zipped is compressed and not deleted.
        entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
        self.assertEqual(LARGE_SIZE, entry.original_size)
        self.assertTrue(LARGE_SIZE > entry.trimmed_size)
        self.assertTrue(entry.trimmed_size > 0)

        # Confirm the file to be deleted is removed.
        entry = summary.get_file('files_to_delete').get_file('file.png')
        self.assertEqual(0, entry.trimmed_size)
        self.assertEqual(LARGE_SIZE, entry.original_size)
542
543
# Allow running this unittest directly as a standalone script.
if __name__ == '__main__':
    unittest.main()
548