1#!/usr/bin/python
2# Copyright 2017 The Chromium OS Authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""unittest for utils.py
7"""
8
9import json
10import os
11import shutil
12import tempfile
13import time
14import unittest
15
16import common
17from autotest_lib.client.bin.result_tools import result_info
18from autotest_lib.client.bin.result_tools import shrink_file_throttler
19from autotest_lib.client.bin.result_tools import throttler_lib
20from autotest_lib.client.bin.result_tools import utils as result_utils
21from autotest_lib.client.bin.result_tools import utils_lib
22from autotest_lib.client.bin.result_tools import view as result_view
23from autotest_lib.client.bin.result_tools import unittest_lib
24
# Default file size (bytes) used when creating test files.
SIZE = unittest_lib.SIZE

# Sizes used for testing throttling
LARGE_SIZE = 1 * 1024 * 1024
SMALL_SIZE = 1 * 1024

# Expected summary of the directory tree built in GetDirSummaryTest.setUp.
# Note the symlinked directory is recorded with size 0 and an empty DIRS
# list, so the symlink target's contents are not double-counted.
EXPECTED_SUMMARY = {
        '': {utils_lib.ORIGINAL_SIZE_BYTES: 4 * SIZE,
             utils_lib.DIRS: [
                     {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                     {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                                 utils_lib.DIRS: [
                                  {'file2': {
                                      utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                                  {'file3': {
                                      utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                                  {'symlink': {
                                      utils_lib.ORIGINAL_SIZE_BYTES: 0,
                                      utils_lib.DIRS: []}}]}},
                     {'folder2': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                                 utils_lib.DIRS:
                                     [{'file2':
                                        {utils_lib.ORIGINAL_SIZE_BYTES:
                                         SIZE}}],
                                }}]}}
50
# First (oldest) client-side summary. `file4` was already trimmed
# (TRIMMED_SIZE_BYTES < ORIGINAL_SIZE_BYTES) when this summary was taken.
SUMMARY_1 = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 6 * SIZE,
       utils_lib.TRIMMED_SIZE_BYTES: 5 * SIZE,
       utils_lib.DIRS: [
         {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                   utils_lib.TRIMMED_SIZE_BYTES: SIZE}},
         {'folder_not_overwritten':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}
              ]}},
          {'file_to_be_overwritten': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
        ]
      }
  }

# Second (newer) client-side summary, used to exercise the merge logic
# against SUMMARY_1: same files, resized files, new files, and name
# collisions between files and directories.
SUMMARY_2 = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 27 * SIZE,
       utils_lib.DIRS: [
         # `file1` exists and has the same size.
         {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         # Change the size of `file2` to make sure summary merge works.
         {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE}},
         # `file3` is new.
         {'file3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         # `file4` is old but throttled earlier.
         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         # Add a new sub-directory.
         {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                     utils_lib.TRIMMED_SIZE_BYTES: SIZE,
                     utils_lib.DIRS: [
                         # Add a file being trimmed.
                         {'file4': {
                           utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                           utils_lib.TRIMMED_SIZE_BYTES: SIZE}
                         }]
                     }},
          # Add a file whose name collides with the previous summary.
          {'folder_not_overwritten': {
            utils_lib.ORIGINAL_SIZE_BYTES: 100 * SIZE}},
          # Add a directory whose name collides with the previous summary.
          {'file_to_be_overwritten':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
            }},
          # Folder was collected earlier, but is now missing from the final
          # result folder (never created in MergeSummaryTest.setUp).
          {'folder_tobe_deleted':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file_tobe_deleted': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
            }},
        ]
      }
  }

# Summary stored in a sub-directory (folder3/folder31) to verify that
# summaries below the top level are merged as well.
SUMMARY_3 = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
       utils_lib.DIRS: [
         {'file10': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         ]
       }
  }

# Sizes in bytes of the serialized dir_summary_*.json files written by
# MergeSummaryTest.setUp (json.dump of the dicts above).
SUMMARY_1_SIZE = 224
SUMMARY_2_SIZE = 388
SUMMARY_3_SIZE = 48
120
# The final result dir has an extra folder and file, also with `file3` removed
# to test the case that client files are removed on the server side.
# The SUMMARY_*_SIZE terms account for the dir_summary_*.json files
# themselves, which are part of the merged result directory.
EXPECTED_MERGED_SUMMARY = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES:
           40 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
       utils_lib.TRIMMED_SIZE_BYTES:
           19 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
       # Size collected is SIZE bytes more than total size as an old `file2` of
       # SIZE bytes is overwritten by a newer file.
       utils_lib.COLLECTED_SIZE_BYTES:
           22 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE + SUMMARY_3_SIZE,
       utils_lib.DIRS: [
         {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'file2': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                    utils_lib.COLLECTED_SIZE_BYTES: 3 * SIZE}},
         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                    utils_lib.TRIMMED_SIZE_BYTES: SIZE}},
         {'folder_not_overwritten':
            {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
             utils_lib.DIRS: [
               {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
            }},
         {'file_to_be_overwritten':
           {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
            utils_lib.COLLECTED_SIZE_BYTES: 2 * SIZE,
            utils_lib.TRIMMED_SIZE_BYTES: SIZE,
            utils_lib.DIRS: [
              {'file1': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}}]
           }},
         {'file3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
         {'folder1': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                     utils_lib.TRIMMED_SIZE_BYTES: SIZE,
                     utils_lib.DIRS: [
                         {'file4': {utils_lib.ORIGINAL_SIZE_BYTES: 20 * SIZE,
                                   utils_lib.TRIMMED_SIZE_BYTES: SIZE}
                         }]
                     }},
         # Collected by the client but absent on the server, so its trimmed
         # size drops to 0 while the collected/original sizes are retained.
         {'folder_tobe_deleted':
           {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
            utils_lib.COLLECTED_SIZE_BYTES: SIZE,
            utils_lib.TRIMMED_SIZE_BYTES: 0,
            utils_lib.DIRS: [
              {'file_tobe_deleted': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE,
                                    utils_lib.COLLECTED_SIZE_BYTES: SIZE,
                                    utils_lib.TRIMMED_SIZE_BYTES: 0}}]
           }},
         # Summary file found in a sub-directory is merged in place.
         {'folder3': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE + SUMMARY_3_SIZE,
                     utils_lib.DIRS: [
                       {'folder31': {
                         utils_lib.ORIGINAL_SIZE_BYTES: SIZE + SUMMARY_3_SIZE,
                         utils_lib.DIRS: [
                             {'file10': {utils_lib.ORIGINAL_SIZE_BYTES: SIZE}},
                             {'dir_summary_3.json': {
                               utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_3_SIZE}},
                            ]}},
                       ]
                     }},
         {'dir_summary_1.json': {
           utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_1_SIZE}},
         {'dir_summary_2.json': {
           utils_lib.ORIGINAL_SIZE_BYTES: SUMMARY_2_SIZE}},
         # Server-side-only folder, present only in the final result dir.
         {'folder2': {utils_lib.ORIGINAL_SIZE_BYTES: 10 * SIZE,
                     utils_lib.DIRS: [
                         {'server_file': {
                           utils_lib.ORIGINAL_SIZE_BYTES: 10 * SIZE}
                         }]
                     }},
        ]
      }
  }
191
192
class GetDirSummaryTest(unittest.TestCase):
    """Test class for ResultInfo.build_from_path method"""

    def setUp(self):
        """Create the directory tree that EXPECTED_SUMMARY describes."""
        self.test_dir = tempfile.mkdtemp()
        unittest_lib.create_file(os.path.join(self.test_dir, 'file1'))

        sub_dir1 = os.path.join(self.test_dir, 'folder1')
        os.mkdir(sub_dir1)
        for name in ('file2', 'file3'):
            unittest_lib.create_file(os.path.join(sub_dir1, name))

        sub_dir2 = os.path.join(self.test_dir, 'folder2')
        os.mkdir(sub_dir2)
        unittest_lib.create_file(os.path.join(sub_dir2, 'file2'))

        # A symlinked directory, expected to show up with size 0 and no
        # contents in the summary.
        os.symlink(sub_dir2, os.path.join(sub_dir1, 'symlink'))

    def tearDown(self):
        """Remove the temporary test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def test_BuildFromPath(self):
        """build_from_path returns the expected summary for the tree."""
        self.assertEqual(
                EXPECTED_SUMMARY,
                result_info.ResultInfo.build_from_path(self.test_dir))
224
225
class MergeSummaryTest(unittest.TestCase):
    """Test class for merge_summaries method"""

    def setUp(self):
        """Build the directory layout described by EXPECTED_MERGED_SUMMARY."""
        # The trailing separator exercises path handling in merge_summaries.
        self.test_dir = tempfile.mkdtemp() + '/'

        # Top-level files. `file2` is larger here than in SUMMARY_1 to
        # simulate a file overwritten by a newer, bigger copy.
        unittest_lib.create_file(os.path.join(self.test_dir, 'file1'))
        unittest_lib.create_file(os.path.join(self.test_dir, 'file2'),
                                 2 * SIZE)
        unittest_lib.create_file(os.path.join(self.test_dir, 'file3'), SIZE)
        unittest_lib.create_file(os.path.join(self.test_dir, 'file4'), SIZE)

        folder1 = os.path.join(self.test_dir, 'folder1')
        os.mkdir(folder1)
        unittest_lib.create_file(os.path.join(folder1, 'file4'), SIZE)

        # Nested directory used to test a summary stored in a subdirectory.
        folder31 = os.path.join(self.test_dir, 'folder3', 'folder31')
        os.makedirs(folder31)
        unittest_lib.create_file(os.path.join(folder31, 'file10'), SIZE)

        folder2 = os.path.join(self.test_dir, 'folder2')
        os.mkdir(folder2)
        unittest_lib.create_file(os.path.join(folder2, 'server_file'),
                                 10 * SIZE)

        # Two directories whose names collide with entries of the other
        # kind (file vs directory) in the client summaries.
        for dir_name in ('folder_not_overwritten', 'file_to_be_overwritten'):
            collision_dir = os.path.join(self.test_dir, dir_name)
            os.mkdir(collision_dir)
            unittest_lib.create_file(os.path.join(collision_dir, 'file1'))

        # Write the summary files. Sleep 10ms between writes so each summary
        # file carries a strictly later timestamp than the previous one.
        self.summary_1 = os.path.join(self.test_dir, 'dir_summary_1.json')
        with open(self.summary_1, 'w') as f:
            json.dump(SUMMARY_1, f)
        time.sleep(0.01)
        self.summary_2 = os.path.join(self.test_dir, 'dir_summary_2.json')
        with open(self.summary_2, 'w') as f:
            json.dump(SUMMARY_2, f)
        time.sleep(0.01)
        self.summary_3 = os.path.join(folder31, 'dir_summary_3.json')
        with open(self.summary_3, 'w') as f:
            json.dump(SUMMARY_3, f)

    def tearDown(self):
        """Remove the temporary test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def testMergeSummaries(self):
        """merge_summaries combines all summary files as expected."""
        collected_bytes, merged_summary, files = result_utils.merge_summaries(
                self.test_dir)

        self.assertEqual(EXPECTED_MERGED_SUMMARY, merged_summary)
        self.assertEqual(12 * SIZE, collected_bytes)
        self.assertEqual(3, len(files))

    def testMergeSummariesFromNoHistory(self):
        """merge_summaries handles a result dir without any existing summary.
        """
        for summary_file in (self.summary_1, self.summary_2, self.summary_3):
            os.remove(summary_file)
        client_collected_bytes, _, _ = result_utils.merge_summaries(
                self.test_dir)
        self.assertEqual(0, client_collected_bytes)

    def testBuildView(self):
        """result_view.build renders a non-trivial html report."""
        client_collected_bytes, summary, _ = result_utils.merge_summaries(
                self.test_dir)
        html_file = os.path.join(self.test_dir,
                                 result_view.DEFAULT_RESULT_SUMMARY_NAME)
        result_view.build(client_collected_bytes, summary, html_file)
        # The report file must exist and contain real content.
        self.assertGreater(os.stat(html_file).st_size, 1000)
316
317
# Not throttled: expected summary when the size limit is large enough that
# _throttle_results leaves every file untouched (no TRIMMED_SIZE_BYTES set).
EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
       utils_lib.DIRS: [
           {'files_to_dedupe': {
               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
               utils_lib.DIRS: [
                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                ]
            }},
           {'files_to_delete': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_shink': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_zip': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
        ]
       }
    }
353
# Size a file is shrunk down to by the shrink throttler.
SHRINK_SIZE = shrink_file_throttler.DEFAULT_FILE_SIZE_LIMIT_BYTE

# Expected summary once the size limit forces the .txt file to be shrunk
# from LARGE_SIZE down to SHRINK_SIZE; everything else is untouched.
EXPECTED_THROTTLED_SUMMARY_WITH_SHRINK = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
       utils_lib.TRIMMED_SIZE_BYTES:
            2 * LARGE_SIZE + 5 * SMALL_SIZE + SHRINK_SIZE,
       utils_lib.DIRS: [
           {'files_to_dedupe': {
               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
               utils_lib.DIRS: [
                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                ]
            }},
           {'files_to_delete': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_shink': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE,
               utils_lib.DIRS: [
                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
                                 utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE}},
                ]
            }},
           {'files_to_zip': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
        ]
       }
    }

# Expected summary once deduping also kicks in: file_2.dmp and file_3.dmp
# are trimmed to 0 bytes while the first, second and last .dmp files are
# kept, on top of the shrink applied above.
EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE = {
  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
       utils_lib.TRIMMED_SIZE_BYTES:
            2 * LARGE_SIZE + 3 * SMALL_SIZE + SHRINK_SIZE,
       utils_lib.DIRS: [
           {'files_to_dedupe': {
               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
               utils_lib.TRIMMED_SIZE_BYTES: 3 * SMALL_SIZE,
               utils_lib.DIRS: [
                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE,
                                   utils_lib.TRIMMED_SIZE_BYTES: 0}},
                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE,
                                   utils_lib.TRIMMED_SIZE_BYTES: 0}},
                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
                ]
            }},
           {'files_to_delete': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
           {'files_to_shink': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE,
               utils_lib.DIRS: [
                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
                                 utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE}},
                ]
            }},
           {'files_to_zip': {
               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
               utils_lib.DIRS: [
                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
                ]
            }},
        ]
       }
    }
435
436
class ThrottleTest(unittest.TestCase):
    """Test class for _throttle_results method"""

    def setUp(self):
        """Create one sub-directory per throttling strategy.

        Directory names must match the EXPECTED_THROTTLED_SUMMARY_*
        constants above (including the 'files_to_shink' (sic) spelling).
        """
        self.test_dir = tempfile.mkdtemp()

        folder = os.path.join(self.test_dir, 'files_to_shink')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.txt')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_zip')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.xml')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_delete')
        os.mkdir(folder)
        file1 = os.path.join(folder, 'file.png')
        unittest_lib.create_file(file1, LARGE_SIZE)

        folder = os.path.join(self.test_dir, 'files_to_dedupe')
        os.mkdir(folder)
        for i in range(5):
            # Sleep briefly so each .dmp file gets a distinct timestamp.
            time.sleep(0.01)
            file1 = os.path.join(folder, 'file_%d.dmp' % i)
            unittest_lib.create_file(file1, SMALL_SIZE)

    def tearDown(self):
        """Cleanup the test directory."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def testThrottleResults(self):
        """Test _throttle_results method."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        # A generous size limit leaves the results untouched.
        result_utils._throttle_results(summary, LARGE_SIZE * 10 / 1024)
        self.assertEqual(EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE, summary)

        # A tighter limit triggers shrinking of the .txt file.
        result_utils._throttle_results(summary, LARGE_SIZE * 3 / 1024)
        self.assertEqual(EXPECTED_THROTTLED_SUMMARY_WITH_SHRINK, summary)

    def testThrottleResults_Dedupe(self):
        """Test _throttle_results method with dedupe triggered."""
        # Change AUTOTEST_LOG_PATTERN to protect file.xml from being compressed
        # before deduping kicks in.
        old_pattern = throttler_lib.AUTOTEST_LOG_PATTERN
        throttler_lib.AUTOTEST_LOG_PATTERN = '.*/file.xml'
        try:
            summary = result_info.ResultInfo.build_from_path(self.test_dir)
            result_utils._throttle_results(
                    summary, (2*LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE) / 1024)
            self.assertEqual(EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE, summary)
        finally:
            # Always restore the module-level pattern for other tests.
            throttler_lib.AUTOTEST_LOG_PATTERN = old_pattern

    def testThrottleResults_Zip(self):
        """Test _throttle_results method with file compression triggered."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        result_utils._throttle_results(
                summary, (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE) / 1024 + 2)
        self.assertEqual(
                3 * LARGE_SIZE + 5 * SMALL_SIZE, summary.original_size)

        # The xml file should be replaced by a smaller .tgz entry.
        entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
        self.assertEqual(LARGE_SIZE, entry.original_size)
        self.assertTrue(LARGE_SIZE > entry.trimmed_size)

        # The compressed file size should be less than 2 KB.
        self.assertTrue(
                summary.trimmed_size <
                (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE + 2 * 1024))
        self.assertTrue(
                summary.trimmed_size >
                (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE))

    def testThrottleResults_Delete(self):
        """Test _throttle_results method with delete triggered."""
        summary = result_info.ResultInfo.build_from_path(self.test_dir)
        result_utils._throttle_results(
                summary, (3*SMALL_SIZE + SHRINK_SIZE) / 1024 + 2)

        # Confirm the original size is preserved.
        self.assertEqual(3 * LARGE_SIZE + 5 * SMALL_SIZE, summary.original_size)

        # Confirm the deduped, zipped and shrunk files are not deleted.
        # The compressed file is at least 512 bytes.
        self.assertTrue(
                3 * SMALL_SIZE + SHRINK_SIZE + 512 < summary.original_size)

        # Confirm the file to be zipped is compressed and not deleted.
        entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
        self.assertEqual(LARGE_SIZE, entry.original_size)
        self.assertTrue(LARGE_SIZE > entry.trimmed_size)
        self.assertTrue(entry.trimmed_size > 0)

        # Confirm the file to be deleted is removed.
        entry = summary.get_file('files_to_delete').get_file('file.png')
        self.assertEqual(0, entry.trimmed_size)
        self.assertEqual(LARGE_SIZE, entry.original_size)
537
538
# This allows the tests to be run in standalone mode.
if __name__ == '__main__':
    unittest.main()
543