[autotest] Add support to merge directory summaries from multiple result collections

This change adds the main logic to merge the directory summaries from the
multiple result collections during the test. During the merge, it also calculates
the total size of result files transferred from the test device.
At the end, the summaries are merged with all the files in the test result
folder on the drone, which will be used to compile the final view of test
results.

BUG=chromium:716218
TEST=unittest

Change-Id: I3ff326a046497f752125ed679e7c4f72bdddc5fa
Reviewed-on: https://chromium-review.googlesource.com/526274
Commit-Ready: Dan Shi <dshi@google.com>
Tested-by: Dan Shi <dshi@google.com>
Reviewed-by: Dan Shi <dshi@google.com>
diff --git a/client/bin/result_utils.py b/client/bin/result_utils.py
index 3b0a10d..9473e49 100755
--- a/client/bin/result_utils.py
+++ b/client/bin/result_utils.py
@@ -24,24 +24,43 @@
 """
 
 import argparse
+import copy
+import glob
 import json
 import logging
 import os
 import time
 
+# Do NOT import autotest_lib modules here. This module can be executed without
+# dependency on other autotest modules. This is to keep the logic of result
+# trimming on the server side, instead of depending on the autotest client
+# module.
 
 DEFAULT_SUMMARY_FILENAME_FMT = 'dir_summary_%d.json'
 # Minimum disk space should be available after saving the summary file.
 MIN_FREE_DISK_BYTES = 10 * 1024 * 1024
 
-# Key names for directory summaries. The keys are started with / so it can be
-# differentiated with a valid file name. The short keys are designed for smaller
-# file size of the directory summary.
-# Size of the directory or file
-TOTAL_SIZE_BYTES = '/S'
+# Following are key names for directory summaries. The keys start with / so
+# they can be differentiated from valid file names. The short keys are
+# designed for smaller file size of the directory summary.
+
+# Original size of the directory or file
+ORIGINAL_SIZE_BYTES = '/S'
+# Size of the directory or file after trimming
+TRIMMED_SIZE_BYTES = '/T'
+# Size of the directory or file being collected from client side
+COLLECTED_SIZE_BYTES = '/C'
 # A dictionary of sub-directories' summary: name: {directory_summary}
 DIRS = '/D'
+# Default root directory name. To allow summaries to be merged effectively, all
+# summaries are collected with root directory of ''
+ROOT_DIR = ''
 
+# Autotest uses some state files to track process running state. The files are
+# deleted from test results. Therefore, these files can be ignored.
+FILES_TO_IGNORE = set([
+    'control.autoserv.state'
+])
 
 def get_unique_dir_summary_file(path):
     """Get a unique file path to save the directory summary json string.
@@ -72,11 +91,11 @@
     @return: A dictionary of the directory summary.
     """
     dir_info = {}
-    dir_info[TOTAL_SIZE_BYTES] = 0
+    dir_info[ORIGINAL_SIZE_BYTES] = 0
     summary = {os.path.basename(path): dir_info}
 
     if os.path.isfile(path):
-        dir_info[TOTAL_SIZE_BYTES] = os.stat(path).st_size
+        dir_info[ORIGINAL_SIZE_BYTES] = os.stat(path).st_size
     else:
         dir_info[DIRS] = {}
         real_path = os.path.realpath(path)
@@ -95,7 +114,7 @@
             f_summary = get_dir_summary(os.path.join(path, f), top_dir,
                                         all_dirs)
             dir_info[DIRS][f] = f_summary[f]
-            dir_info[TOTAL_SIZE_BYTES] += f_summary[f][TOTAL_SIZE_BYTES]
+            dir_info[ORIGINAL_SIZE_BYTES] += f_summary[f][ORIGINAL_SIZE_BYTES]
 
     return summary
 
@@ -113,6 +132,232 @@
     return get_dir_summary(path, top_dir=path)
 
 
+def _update_sizes(entry):
+    """Update a directory entry's sizes.
+
+    Values of ORIGINAL_SIZE_BYTES, TRIMMED_SIZE_BYTES and COLLECTED_SIZE_BYTES
+    are re-calculated based on the files under the directory. If the entry is a
+    file, the update is skipped.
+
+    @param entry: A dict of directory entry in a summary.
+    """
+    if DIRS not in entry:
+        return
+
+    entry[ORIGINAL_SIZE_BYTES] = sum([entry[DIRS][s][ORIGINAL_SIZE_BYTES]
+                                     for s in entry[DIRS]])
+    # Before trimming is implemented, COLLECTED_SIZE_BYTES and
+    # TRIMMED_SIZE_BYTES have the same value of ORIGINAL_SIZE_BYTES.
+    entry[COLLECTED_SIZE_BYTES] = sum([
+            entry[DIRS][s].get(
+                    COLLECTED_SIZE_BYTES,
+                    entry[DIRS][s].get(TRIMMED_SIZE_BYTES,
+                                       entry[DIRS][s][ORIGINAL_SIZE_BYTES]))
+            for s in entry[DIRS]])
+    entry[TRIMMED_SIZE_BYTES] = sum([
+            entry[DIRS][s].get(TRIMMED_SIZE_BYTES,
+                               entry[DIRS][s][ORIGINAL_SIZE_BYTES])
+            for s in entry[DIRS]])
+
+
+def _delete_missing_entries(summary_old, summary_new):
+    """Delete files/directories that only exist in the old summary.
+
+    When the new summary is final, i.e., it's built from the final result
+    directory, files or directories missing are considered to be deleted and
+    trimmed to size 0.
+
+    @param summary_old: Old directory summary.
+    @param summary_new: New directory summary.
+    """
+    for name in summary_old.keys():
+        if name not in summary_new:
+            if DIRS in summary_old[name]:
+                # Trim sub-directories.
+                _delete_missing_entries(summary_old[name][DIRS], {})
+                _update_sizes(summary_old[name])
+            elif name in FILES_TO_IGNORE:
+                # Remove the file from the summary as it can be ignored.
+                del summary_old[name]
+            else:
+                # Before setting the trimmed size to 0, update the collected
+                # size if it's not set yet.
+                if COLLECTED_SIZE_BYTES not in summary_old[name]:
+                    trimmed_size = summary_old[name].get(
+                            TRIMMED_SIZE_BYTES,
+                            summary_old[name][ORIGINAL_SIZE_BYTES])
+                    summary_old[name][COLLECTED_SIZE_BYTES] = trimmed_size
+                summary_old[name][TRIMMED_SIZE_BYTES] = 0
+        elif DIRS in summary_old[name]:
+            _delete_missing_entries(summary_old[name][DIRS],
+                                    summary_new[name][DIRS])
+            _update_sizes(summary_old[name])
+    _update_sizes(summary_old)
+
+
+def _merge(summary_old, summary_new, is_final=False):
+    """Merge a new directory summary to an old one.
+
+    Update the old directory summary with the new summary. Also calculate the
+    total size of results collected from the client side.
+
+    When merging with previously collected results, any results not existing in
+    the new summary or files with size different from the new files collected
+    are considered as extra results collected or overwritten by the new results.
+    Therefore, the size of the collected result should include such files, and
+    the COLLECTED_SIZE_BYTES can be larger than TRIMMED_SIZE_BYTES.
+    As an example:
+    summary_old: {'file1': {TRIMMED_SIZE_BYTES: 1000,
+                            ORIGINAL_SIZE_BYTES: 1000,
+                            COLLECTED_SIZE_BYTES: 1000}}
+    This means a result `file1` of original size 1KB was collected with size of
+    1KB.
+    summary_new: {'file1': {TRIMMED_SIZE_BYTES: 1000,
+                            ORIGINAL_SIZE_BYTES: 2000,
+                            COLLECTED_SIZE_BYTES: 1000}}
+    This means a result `file1` of 2KB was trimmed down to 1KB and was collected
+    with size of 1KB.
+    Note that the second result collection has an updated result `file1`
+    (because of the different ORIGINAL_SIZE_BYTES), and it needs to be rsync-ed
+    to the drone. Therefore, the merged summary will be:
+    {'file1': {TRIMMED_SIZE_BYTES: 1000,
+               ORIGINAL_SIZE_BYTES: 2000,
+               COLLECTED_SIZE_BYTES: 2000}}
+    Note that:
+    * TRIMMED_SIZE_BYTES is still at 1KB, which reflects the actual size of the
+      file to be collected.
+    * ORIGINAL_SIZE_BYTES is updated to 2KB, which is the size of the file in
+      the new result `file1`.
+    * COLLECTED_SIZE_BYTES is 2KB because rsync will copy `file1` twice as it's
+      changed.
+
+    @param summary_old: Old directory summary.
+    @param summary_new: New directory summary.
+    @param is_final: True if summary_new is built from the final result folder.
+            Default is set to False.
+    @return: A tuple of (bytes_diff, merged_summary):
+            bytes_diff: The size of results collected based on the diff of the
+                old summary and the new summary.
+            merged_summary: Merged directory summary.
+    """
+    for name in summary_new:
+        if not name in summary_old:
+            # A file/dir exists in new client dir, but not in the old one, which
+            # means that the file or a directory is newly collected.
+            summary_old[name] = copy.deepcopy(summary_new[name])
+        elif DIRS in summary_new[name]:
+            # `name` is a directory in new summary, merge the directories of the
+            # old and new summaries under `name`.
+
+            if DIRS not in summary_old[name]:
+                # If `name` is a file in old summary but a directory in new
+                # summary, the file in the old summary will be overwritten by
+                # the new directory by rsync. Therefore, force it to be an empty
+                # directory in old summary, so that the new directory can be
+                # merged.
+                summary_old[name][ORIGINAL_SIZE_BYTES] = 0
+                summary_old[name][TRIMMED_SIZE_BYTES] = 0
+                summary_old[name][COLLECTED_SIZE_BYTES] = 0
+                summary_old[name][DIRS] = {}
+
+            _merge(summary_old[name][DIRS], summary_new[name][DIRS], is_final)
+        else:
+            # `name` is a file. Compare the original size, if they are
+            # different, the file was overwritten, so increment the
+            # COLLECTED_SIZE_BYTES.
+
+            if DIRS in summary_old[name]:
+                # If `name` is a directory in old summary, but a file in the new
+                # summary, rsync will fail to copy the file as it can't
+                # overwrite a directory. Therefore, skip the merge.
+                continue
+
+            new_size = summary_new[name][ORIGINAL_SIZE_BYTES]
+            old_size = summary_old[name][ORIGINAL_SIZE_BYTES]
+            new_trimmed_size = summary_new[name].get(
+                    TRIMMED_SIZE_BYTES, summary_new[name][ORIGINAL_SIZE_BYTES])
+            old_trimmed_size = summary_old[name].get(
+                    TRIMMED_SIZE_BYTES, summary_old[name][ORIGINAL_SIZE_BYTES])
+            if new_size != old_size:
+                if is_final and new_trimmed_size == old_trimmed_size:
+                    # If the file is merged from the final result folder to an
+                    # older summary, it's not considered to be trimmed if the
+                    # size is not changed. The reason is that the file on the
+                    # server side does not have the info of its original size.
+                    continue
+
+                # Before trimming is implemented, COLLECTED_SIZE_BYTES is the
+                # value of ORIGINAL_SIZE_BYTES.
+                new_collected_size = summary_new[name].get(
+                        COLLECTED_SIZE_BYTES,
+                        summary_new[name].get(
+                                TRIMMED_SIZE_BYTES,
+                                summary_new[name][ORIGINAL_SIZE_BYTES]))
+                old_collected_size = summary_old[name].get(
+                        COLLECTED_SIZE_BYTES,
+                        summary_old[name].get(
+                                TRIMMED_SIZE_BYTES,
+                                summary_old[name][ORIGINAL_SIZE_BYTES]))
+
+                summary_old[name][COLLECTED_SIZE_BYTES] = (
+                        new_collected_size + old_collected_size)
+                summary_old[name][TRIMMED_SIZE_BYTES] = summary_new[name].get(
+                        TRIMMED_SIZE_BYTES,
+                        summary_new[name][ORIGINAL_SIZE_BYTES])
+                summary_old[name][ORIGINAL_SIZE_BYTES] = new_size
+
+        # Update ORIGINAL_SIZE_BYTES, TRIMMED_SIZE_BYTES and
+        # COLLECTED_SIZE_BYTES based on the merged directory summary.
+        _update_sizes(summary_old[name])
+
+
+def merge_summaries(path):
+    """Merge all directory summaries in the given path.
+
+    This function calculates the total size of result files being collected from
+    the test device and the files generated on the drone. It also returns the
+    merged directory summary.
+
+    @param path: A path to search for directory summaries.
+    @return a tuple of (client_collected_bytes, merged_summary):
+            client_collected_bytes: The total size of results collected from
+                the DUT. The number can be larger than the total file size of
+                the given path, as files can be overwritten or removed.
+            merged_summary: The merged directory summary of the given path.
+    """
+    # Find all directory summary files and sort them by the time stamp in file
+    # name.
+    summary_files = glob.glob(os.path.join(path, 'dir_summary_*.json'))
+    summary_files = sorted(summary_files, key=os.path.getmtime)
+
+    all_summaries = []
+    for summary_file in summary_files:
+        with open(summary_file) as f:
+            all_summaries.append(json.load(f))
+
+    # Merge all summaries.
+    merged_summary = (copy.deepcopy(all_summaries[0]) if len(all_summaries) > 0
+                      else {})
+    for summary in all_summaries[1:]:
+        _merge(merged_summary, summary)
+    # After all summaries from the test device (client side) are merged, we can
+    # get the total size of result files being transferred from the test device.
+    client_collected_bytes = merged_summary[ROOT_DIR][COLLECTED_SIZE_BYTES]
+
+    # Get the summary of current directory
+
+    # Make sure the path ends with /, so the top directory in the summary will
+    # be '', which is consistent with other summaries.
+    if not path.endswith(os.sep):
+        path += os.sep
+
+    last_summary = get_dir_summary(path, top_dir=path)
+    _merge(merged_summary, last_summary, is_final=True)
+    _delete_missing_entries(merged_summary, last_summary)
+
+    return client_collected_bytes, merged_summary
+
+
 def main():
     """main script. """
     parser = argparse.ArgumentParser()
diff --git a/client/bin/result_utils_unittest.py b/client/bin/result_utils_unittest.py
index 3fc0c8c..b13daf0 100644
--- a/client/bin/result_utils_unittest.py
+++ b/client/bin/result_utils_unittest.py
@@ -6,9 +6,11 @@
 """unittest for result_utils.py
 """
 
+import json
 import os
 import shutil
 import tempfile
+import time
 import unittest
 
 import common
@@ -17,62 +19,182 @@
 
 SIZE = 10
 EXPECTED_SUMMARY = {
-        '': {result_utils.TOTAL_SIZE_BYTES: 4 * SIZE,
+        '': {result_utils.ORIGINAL_SIZE_BYTES: 4 * SIZE,
              result_utils.DIRS: {
-                     'file1': {result_utils.TOTAL_SIZE_BYTES: SIZE},
-                     'folder1': {result_utils.TOTAL_SIZE_BYTES: 2 * SIZE,
+                     'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+                     'folder1': {result_utils.ORIGINAL_SIZE_BYTES: 2 * SIZE,
                                  result_utils.DIRS: {
                                   'file2': {
-                                      result_utils.TOTAL_SIZE_BYTES: SIZE},
+                                      result_utils.ORIGINAL_SIZE_BYTES: SIZE},
                                   'file3': {
-                                      result_utils.TOTAL_SIZE_BYTES: SIZE},
-                                  'symlink': {result_utils.TOTAL_SIZE_BYTES: 0,
-                                              result_utils.DIRS: {}}}},
-                     'folder2': {result_utils.TOTAL_SIZE_BYTES: SIZE,
+                                      result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+                                  'symlink': {
+                                      result_utils.ORIGINAL_SIZE_BYTES: 0,
+                                      result_utils.DIRS: {}}}},
+                     'folder2': {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
                                  result_utils.DIRS:
-                                     {'file2': {result_utils.TOTAL_SIZE_BYTES:
-                                                SIZE}},
+                                     {'file2':
+                                        {result_utils.ORIGINAL_SIZE_BYTES:
+                                         SIZE}},
                                 }}}}
 
+SUMMARY_1 = {
+  '': {result_utils.ORIGINAL_SIZE_BYTES: 4 * SIZE,
+       result_utils.DIRS: {
+         'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+         'file2': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+         'folder_not_overwritten':
+            {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
+             result_utils.DIRS: {
+               'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE}}
+            },
+          'file_to_be_overwritten': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+        }
+      }
+  }
+
+SUMMARY_2 = {
+  '': {result_utils.ORIGINAL_SIZE_BYTES: 26 * SIZE,
+       result_utils.DIRS: {
+         # `file1` exists and has the same size.
+         'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+         # Change the size of `file2` to make sure summary merge works.
+         'file2': {result_utils.ORIGINAL_SIZE_BYTES: 2 * SIZE},
+         # `file3` is new.
+         'file3': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+         # Add a new sub-directory.
+         'folder1': {result_utils.ORIGINAL_SIZE_BYTES: 20 * SIZE,
+                     result_utils.TRIMMED_SIZE_BYTES: SIZE,
+                     result_utils.DIRS: {
+                         # Add a file being trimmed.
+                         'file4': {
+                           result_utils.ORIGINAL_SIZE_BYTES: 20 * SIZE,
+                           result_utils.TRIMMED_SIZE_BYTES: SIZE}
+                         }
+                     },
+          # Add a file whose name collides with the previous summary.
+          'folder_not_overwritten': {
+            result_utils.ORIGINAL_SIZE_BYTES: 100 * SIZE},
+          # Add a directory whose name collides with the previous summary.
+          'file_to_be_overwritten':
+            {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
+             result_utils.DIRS: {
+               'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE}}
+            },
+          # Folder was collected, but is missing from the final result folder.
+          'folder_tobe_deleted':
+            {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
+             result_utils.DIRS: {
+               'file_tobe_deleted': {result_utils.ORIGINAL_SIZE_BYTES: SIZE}}
+            },
+        }
+      }
+  }
+
+SUMMARY_1_SIZE = 171
+SUMMARY_2_SIZE = 345
+
+# The final result dir has an extra folder and file, also with `file3` removed
+# to test the case that client files are removed on the server side.
+EXPECTED_MERGED_SUMMARY = {
+  '': {result_utils.ORIGINAL_SIZE_BYTES:
+           37 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE,
+       result_utils.TRIMMED_SIZE_BYTES:
+           17 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE,
+       # Size collected is SIZE bytes more than total size as an old `file2` of
+       # SIZE bytes is overwritten by a newer file.
+       result_utils.COLLECTED_SIZE_BYTES:
+           19 * SIZE + SUMMARY_1_SIZE + SUMMARY_2_SIZE,
+       result_utils.DIRS: {
+         'dir_summary_1.json': {
+           result_utils.ORIGINAL_SIZE_BYTES: SUMMARY_1_SIZE},
+         'dir_summary_2.json': {
+           result_utils.ORIGINAL_SIZE_BYTES: SUMMARY_2_SIZE},
+         'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+         'file2': {result_utils.ORIGINAL_SIZE_BYTES: 2 * SIZE,
+                   result_utils.COLLECTED_SIZE_BYTES: 3 * SIZE,
+                   result_utils.TRIMMED_SIZE_BYTES: 2 * SIZE},
+         'file3': {result_utils.ORIGINAL_SIZE_BYTES: SIZE},
+         'folder1': {result_utils.ORIGINAL_SIZE_BYTES: 20 * SIZE,
+                     result_utils.TRIMMED_SIZE_BYTES: SIZE,
+                     result_utils.COLLECTED_SIZE_BYTES: SIZE,
+                     result_utils.DIRS: {
+                         'file4': {result_utils.ORIGINAL_SIZE_BYTES: 20 * SIZE,
+                                   result_utils.TRIMMED_SIZE_BYTES: SIZE}
+                         }
+                     },
+         'folder2': {result_utils.ORIGINAL_SIZE_BYTES: 10 * SIZE,
+                     result_utils.COLLECTED_SIZE_BYTES: 10 * SIZE,
+                     result_utils.TRIMMED_SIZE_BYTES: 10 * SIZE,
+                     result_utils.DIRS: {
+                         'server_file': {
+                           result_utils.ORIGINAL_SIZE_BYTES: 10 * SIZE}
+                         }
+                     },
+         'folder_not_overwritten':
+            {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
+             result_utils.COLLECTED_SIZE_BYTES: SIZE,
+             result_utils.TRIMMED_SIZE_BYTES: SIZE,
+             result_utils.DIRS: {
+               'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE}}
+            },
+         'file_to_be_overwritten':
+           {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
+            result_utils.COLLECTED_SIZE_BYTES: SIZE,
+            result_utils.TRIMMED_SIZE_BYTES: SIZE,
+            result_utils.DIRS: {
+              'file1': {result_utils.ORIGINAL_SIZE_BYTES: SIZE}}
+           },
+         'folder_tobe_deleted':
+           {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
+            result_utils.COLLECTED_SIZE_BYTES: SIZE,
+            result_utils.TRIMMED_SIZE_BYTES: 0,
+            result_utils.DIRS: {
+              'file_tobe_deleted': {result_utils.ORIGINAL_SIZE_BYTES: SIZE,
+                                    result_utils.COLLECTED_SIZE_BYTES: SIZE,
+                                    result_utils.TRIMMED_SIZE_BYTES: 0}}
+           },
+        }
+      }
+  }
+
+def create_file(path, size=SIZE):
+    """Create a temp file at given path with the given size.
+
+    @param path: Path to the temp file.
+    @param size: Size of the temp file, default to SIZE.
+    """
+    with open(path, 'w') as f:
+        f.write('A' * size)
+
+
 class GetDirSummaryTest(unittest.TestCase):
     """Test class for get_dir_summary method"""
 
-    def create_file(self, path):
-        """Create a temp file at given path with the given size.
-
-        @param path: Path to the temp file.
-        @param size: Size of the temp file, default to SIZE.
-        """
-        with open(path, 'w') as f:
-            f.write('A' * SIZE)
-
-
     def setUp(self):
         """Setup directory for test."""
         self.test_dir = tempfile.mkdtemp()
         file1 = os.path.join(self.test_dir, 'file1')
-        self.create_file(file1)
+        create_file(file1)
         folder1 = os.path.join(self.test_dir, 'folder1')
         os.mkdir(folder1)
         file2 = os.path.join(folder1, 'file2')
-        self.create_file(file2)
+        create_file(file2)
         file3 = os.path.join(folder1, 'file3')
-        self.create_file(file3)
+        create_file(file3)
 
         folder2 = os.path.join(self.test_dir, 'folder2')
         os.mkdir(folder2)
         file4 = os.path.join(folder2, 'file2')
-        self.create_file(file4)
+        create_file(file4)
 
         symlink = os.path.join(folder1, 'symlink')
         os.symlink(folder2, symlink)
 
-
     def tearDown(self):
         """Cleanup the test directory."""
         shutil.rmtree(self.test_dir, ignore_errors=True)
 
-
     def test_get_dir_summary(self):
         """Test method get_dir_summary."""
         summary_json = result_utils.get_dir_summary(
@@ -80,6 +202,59 @@
         self.assertEqual(EXPECTED_SUMMARY, summary_json)
 
 
+class MergeSummaryTest(unittest.TestCase):
+    """Test class for merge_summaries method"""
+
+    def setUp(self):
+        """Setup directory to match the file structure in EXPECTED_MERGED_SUMMARY."""
+        self.test_dir = tempfile.mkdtemp() + '/'
+        file1 = os.path.join(self.test_dir, 'file1')
+        create_file(file1)
+        file2 = os.path.join(self.test_dir, 'file2')
+        create_file(file2, 2*SIZE)
+        file3 = os.path.join(self.test_dir, 'file3')
+        create_file(file3, SIZE)
+        folder1 = os.path.join(self.test_dir, 'folder1')
+        os.mkdir(folder1)
+        file4 = os.path.join(folder1, 'file4')
+        create_file(file4, SIZE)
+        folder2 = os.path.join(self.test_dir, 'folder2')
+        os.mkdir(folder2)
+        server_file = os.path.join(folder2, 'server_file')
+        create_file(server_file, 10*SIZE)
+        folder_not_overwritten = os.path.join(
+                self.test_dir, 'folder_not_overwritten')
+        os.mkdir(folder_not_overwritten)
+        file1 = os.path.join(folder_not_overwritten, 'file1')
+        create_file(file1)
+        file_to_be_overwritten = os.path.join(
+                self.test_dir, 'file_to_be_overwritten')
+        os.mkdir(file_to_be_overwritten)
+        file1 = os.path.join(file_to_be_overwritten, 'file1')
+        create_file(file1)
+
+        # Save summary file to test_dir
+        summary_1 = os.path.join(self.test_dir, 'dir_summary_1.json')
+        with open(summary_1, 'w') as f:
+            json.dump(SUMMARY_1, f)
+        # Wait for 10ms, to make sure summary_2 has a later time stamp.
+        time.sleep(0.01)
+        summary_2 = os.path.join(self.test_dir, 'dir_summary_2.json')
+        with open(summary_2, 'w') as f:
+            json.dump(SUMMARY_2, f)
+
+    def tearDown(self):
+        """Cleanup the test directory."""
+        shutil.rmtree(self.test_dir, ignore_errors=True)
+
+    def testMergeSummaries(self):
+        """Test method merge_summaries."""
+        client_collected_bytes, merged_summary = result_utils.merge_summaries(
+                self.test_dir)
+        self.assertEqual(EXPECTED_MERGED_SUMMARY, merged_summary)
+        self.assertEqual(client_collected_bytes, 9 * SIZE)
+
+
 # this is so the test can be run in standalone mode
 if __name__ == '__main__':
     """Main"""