Merge "atest_tradefed: add cts-dalvik-host-test-runner"
diff --git a/atest/atest.py b/atest/atest.py
index bb6764f..e0524e4 100755
--- a/atest/atest.py
+++ b/atest/atest.py
@@ -32,7 +32,6 @@
 import time
 
 import atest_arg_parser
-import atest_error
 import atest_utils
 import cli_translator
 # pylint: disable=import-error
@@ -361,15 +360,10 @@
     build_targets = set()
     test_infos = set()
     if _will_run_tests(args):
-        try:
-            build_targets, test_infos = translator.translate(args)
-            args = _validate_exec_mode(args, test_infos)
-        except atest_error.TestDiscoveryException:
-            logging.exception('Error occured in test discovery:')
-            logging.info('This can happen after a repo sync or if the test is '
-                         'new. Running: with "%s"  may resolve the issue.',
-                         constants.REBUILD_MODULE_INFO_FLAG)
+        build_targets, test_infos = translator.translate(args)
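+        # translate() now reports discovery failures itself, so simply return the
+        # not-found exit code when no tests were discovered.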
+        if not test_infos:
             return constants.EXIT_CODE_TEST_NOT_FOUND
+        args = _validate_exec_mode(args, test_infos)
     if args.info:
         return _print_test_info(mod_info, test_infos)
     build_targets |= test_runner_handler.get_test_runner_reqs(mod_info,
diff --git a/atest/atest_completion.sh b/atest/atest_completion.sh
index 6ad8bda..713e452 100644
--- a/atest/atest_completion.sh
+++ b/atest/atest_completion.sh
@@ -31,34 +31,58 @@
 fetch_testable_modules() {
     [ -z $ANDROID_PRODUCT_OUT ] && { exit 0; }
     $PYTHON - << END
+import hashlib
 import json
 import os
+import pickle
 import sys
 
 modules = set()
 module_info = os.path.join(os.environ["ANDROID_PRODUCT_OUT"] ,"module-info.json")
 
-if os.path.isfile(module_info):
-    json_data = json.load(open(module_info, 'r'))
+def get_serialised_filename(mod_info):
+    """Determine the serialised filename used for reading testable modules.
 
+    mod_info: the path of module-info.json.
+
+    Returns: a path string suffixed with the md5 hash of module-info.json, e.g.
+            /dev/shm/atest_e89e37a2e8e45be71567520b8579ffb8 (Linux)
+            /tmp/atest_e89e37a2e8e45be71567520b8579ffb8     (MacOSX)
+    """
+    serial_filename = "/tmp/atest_" if sys.platform == "darwin" else "/dev/shm/atest_"
+    with open(mod_info, 'r') as mod_info_obj:
+        serial_filename += hashlib.md5(mod_info_obj.read().encode('utf-8')).hexdigest()
+    return serial_filename
+
+def create_json_data(mod_info):
+    with open(mod_info, 'r') as mod_info_obj:
+        return json.load(mod_info_obj)
+
+def create_serialised_file(serial_file):
+    # TODO: the logic below will be replaced with test_finder_utils.py once
+    # aosp/736172 is merged (b/112904944).
     '''
-    Testable module names can be found via either condition:
-    1. auto_test_config == True
-    2. AndroidTest.xml in the "path"
+    Testable module names can be found by fulfilling both conditions:
+    1. module_name == value['module_name']
+    2. test_config has value OR auto_test_config has value
     '''
-    for module_name, value in json_data.items():
-        if value['auto_test_config']:
+    for module_name, value in create_json_data(module_info).items():
+        if module_name != value.get("module_name", ""):
+            continue
+        elif value.get("auto_test_config") or value.get("test_config"):
             modules.add(module_name)
-        else:
-           for path in value['path']:
-               test_xml = os.path.join(os.environ["ANDROID_BUILD_TOP"], path, "AndroidTest.xml")
-               if os.path.isfile(test_xml):
-                   modules.add(module_name)
-                   break
+    print("\n".join(modules))
+    with open(serial_file, 'wb') as serial_file_obj:
+        pickle.dump(modules, serial_file_obj, protocol=2)
 
-    for module in modules:
-        print(module)
-
+if os.path.isfile(module_info):
+    latest_serial_file = get_serialised_filename(module_info)
+    # The filename embeds the md5 of module-info.json, so a changed
+    # module-info.json results in a new serialisation file being created.
+    if not os.path.exists(latest_serial_file):
+        create_serialised_file(latest_serial_file)
+    else:
+        with open(latest_serial_file, 'rb') as serial_file_obj:
+            print("\n".join(pickle.load(serial_file_obj)))
 else:
     print("")
 END
@@ -79,8 +103,7 @@
 
 parser = atest_arg_parser.AtestArgParser()
 parser.add_atest_args()
-for arg in parser.get_args():
-    print(arg)
+print("\n".join(parser.get_args()))
 END
 }
 
@@ -94,6 +117,8 @@
         -*)
             COMPREPLY=($(compgen -W "$(fetch_atest_args)" -- $current_word))
             ;;
+        */*)
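+            # Words containing a slash are treated as paths; leave COMPREPLY empty so the
+            # "-o default" fallback performs normal filename completion.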
+            ;;
         *)
             local candidate_args=$(ls; fetch_testable_modules)
             COMPREPLY=($(compgen -W "$candidate_args" -- $current_word))
@@ -102,4 +127,9 @@
     return 0
 }
 
+# Use the "nosort" option so file/dir names are completed first, in generation order.
+# BASH version <= 4.3 doesn't have the nosort option, so fall back to plain "-o default" below.
+# Note that nosort has no effect for zsh.
+comp_options="-o default -o nosort"
+complete -F _atest $comp_options atest 2>/dev/null || \
 complete -F _atest -o default atest
diff --git a/atest/atest_error.py b/atest/atest_error.py
index f00116f..06ebf19 100644
--- a/atest/atest_error.py
+++ b/atest/atest_error.py
@@ -29,13 +29,13 @@
 class TestWithNoModuleError(TestDiscoveryException):
     """Raised when test files have no parent module directory."""
 
-class MissingPackageNameError(Exception):
+class MissingPackageNameError(TestDiscoveryException):
     """Raised when the test class java file does not contain a package name."""
 
-class TooManyMethodsError(Exception):
+class TooManyMethodsError(TestDiscoveryException):
     """Raised when input string contains more than one # character."""
 
-class MethodWithoutClassError(Exception):
+class MethodWithoutClassError(TestDiscoveryException):
     """Raised when method is appended via # but no class file specified."""
 
 class UnknownTestRunnerError(Exception):
@@ -53,11 +53,11 @@
 class ShouldNeverBeCalledError(Exception):
     """Raised when something is called when it shouldn't, used for testing."""
 
-class FatalIncludeError(SyntaxError):
+class FatalIncludeError(TestDiscoveryException):
     """Raised if expanding include tag fails."""
 
-class MissingCCTestCaseError(Exception):
+class MissingCCTestCaseError(TestDiscoveryException):
     """Raised when the cc file does not contain a test case class."""
 
-class XmlNotExistError(Exception):
+class XmlNotExistError(TestDiscoveryException):
     """Raised when the xml file does not exist."""
diff --git a/atest/cli_translator.py b/atest/cli_translator.py
index b4f8337..2cc0470 100644
--- a/atest/cli_translator.py
+++ b/atest/cli_translator.py
@@ -76,13 +76,17 @@
             test_mapping_test_details = [None] * len(tests)
         for test, tm_test_detail in zip(tests, test_mapping_test_details):
             test_found = False
+            find_test_err_msg = None
             for finder in test_finder_handler.get_find_methods_for_test(
                     self.mod_info, test):
                 # For tests in TEST_MAPPING, find method is only related to
                 # test name, so the details can be set after test_info object
                 # is created.
-                test_info = finder.find_method(finder.test_finder_instance,
-                                               test)
+                try:
+                    test_info = finder.find_method(finder.test_finder_instance,
+                                                   test)
+                except atest_error.TestDiscoveryException as e:
+                    find_test_err_msg = e
                 if test_info:
                     if tm_test_detail:
                         test_info.data[constants.TI_MODULE_ARG] = (
@@ -91,12 +95,22 @@
                     test_infos.add(test_info)
                     test_found = True
                     finder_info = finder.finder_info
-                    clr_test_name = atest_utils.colorize(test, constants.GREEN)
-                    print("Found '%s' as %s" % (clr_test_name, finder_info))
+                    print("Found '%s' as %s" % (
+                        atest_utils.colorize(test, constants.GREEN),
+                        finder_info))
                     break
             if not test_found:
-                raise atest_error.NoTestFoundError('No test found for: %s' %
-                                                   test)
+                print('No test found for: %s' %
+                      atest_utils.colorize(test, constants.RED))
+                if find_test_err_msg:
+                    print('%s\n' % (atest_utils.colorize(
+                        find_test_err_msg, constants.MAGENTA)))
+                else:
+                    print('(This can happen after a repo sync or if the test'
+                          ' is new. Running with "%s" may resolve the issue.)'
+                          '\n' % (atest_utils.colorize(
+                              constants.REBUILD_MODULE_INFO_FLAG,
+                              constants.RED)))
         return test_infos
 
     def _read_tests_in_test_mapping(self, test_mapping_file):
diff --git a/atest/cli_translator_unittest.py b/atest/cli_translator_unittest.py
index 899c57c..fb2530e 100755
--- a/atest/cli_translator_unittest.py
+++ b/atest/cli_translator_unittest.py
@@ -21,7 +21,6 @@
 import re
 import mock
 
-import atest_error
 import cli_translator as cli_t
 import constants
 import test_finder_handler
@@ -103,11 +102,12 @@
         unittest_utils.assert_strict_equal(
             self, ctr._get_test_infos(mult_test), expected_test_infos)
 
-        # Let's make sure we raise an error when we have no tests found.
+        # Check that an empty set is returned when no tests are found.
         mock_getfindmethods.return_value = [
             test_finder_base.Finder(None, find_method_return_nothing, None)]
-        self.assertRaises(atest_error.NoTestFoundError, ctr._get_test_infos,
-                          one_test)
+        null_test_info = set()
+        self.assertEqual(null_test_info, ctr._get_test_infos(one_test))
+        self.assertEqual(null_test_info, ctr._get_test_infos(mult_test))
 
         # Check the method works for test mapping.
         test_detail1 = test_mapping.TestDetail(uc.TEST_MAPPING_TEST)
diff --git a/atest/test_finders/module_finder.py b/atest/test_finders/module_finder.py
index 421dd57..4933e19 100644
--- a/atest/test_finders/module_finder.py
+++ b/atest/test_finders/module_finder.py
@@ -442,8 +442,9 @@
         """
         _, methods = test_finder_utils.split_methods(package)
         if methods:
-            raise atest_error.MethodWithoutClassError('Method filtering '
-                                                      'requires class')
+            raise atest_error.MethodWithoutClassError('%s: Method filtering '
+                                                      'requires class' % (
+                                                          methods))
         # Confirm that packages exists and get user input for multiples.
         if rel_config:
             search_dir = os.path.join(self.root_dir,
@@ -537,7 +538,8 @@
         # Path is to cc file.
         elif file_name and _CC_EXT_RE.match(file_name):
             if not test_finder_utils.has_cc_class(path):
-                raise atest_error.MissingCCTestCaseError(path)
+                raise atest_error.MissingCCTestCaseError(
+                    "Can't find CC class in %s" % path)
             if methods:
                 data[constants.TI_FILTER] = frozenset(
                     [test_info.TestFilter(test_finder_utils.get_cc_filter(
@@ -552,7 +554,9 @@
                     if package_name:
                         # methods should be empty frozenset for package.
                         if methods:
-                            raise atest_error.MethodWithoutClassError()
+                            raise atest_error.MethodWithoutClassError(
+                                '%s: Method filtering requires class'
+                                % str(methods))
                         data[constants.TI_FILTER] = frozenset(
                             [test_info.TestFilter(package_name, methods)])
                         break
diff --git a/atest/test_finders/test_finder_utils.py b/atest/test_finders/test_finder_utils.py
index bf1417f..ef91c17 100644
--- a/atest/test_finders/test_finder_utils.py
+++ b/atest/test_finders/test_finder_utils.py
@@ -160,7 +160,9 @@
                 package = match.group('package')
                 cls = os.path.splitext(os.path.split(test_path)[1])[0]
                 return '%s.%s' % (package, cls)
-    raise atest_error.MissingPackageNameError(test_path)
+    raise atest_error.MissingPackageNameError('%s: Test class java file '
+                                              'does not contain a package '
+                                              'name.' % test_path)
 
 
 def has_cc_class(test_path):
@@ -629,7 +631,8 @@
         A set of plan config paths which are depended by xml_file.
     """
     if not os.path.exists(xml_file):
-        raise atest_error.XmlNotExistError(xml_file)
+        raise atest_error.XmlNotExistError('%s: The xml file does '
+                                           'not exist' % xml_file)
     plans = set()
     xml_root = ET.parse(xml_file).getroot()
     plans.add(xml_file)
diff --git a/atest/test_runner_handler.py b/atest/test_runner_handler.py
index 6839992..32cdac8 100644
--- a/atest/test_runner_handler.py
+++ b/atest/test_runner_handler.py
@@ -104,6 +104,7 @@
     """Run the given tests.
 
     Args:
+        results_dir: String directory to store atest results.
         test_infos: List of TestInfo.
         extra_args: Dict of extra args for test runners to use.
 
diff --git a/prod-tests/src/com/android/performance/tests/AppInstallTest.java b/prod-tests/src/com/android/performance/tests/AppInstallTest.java
index ac85b3b..684722c 100644
--- a/prod-tests/src/com/android/performance/tests/AppInstallTest.java
+++ b/prod-tests/src/com/android/performance/tests/AppInstallTest.java
@@ -25,6 +25,7 @@
 import com.android.tradefed.testtype.IDeviceTest;
 import com.android.tradefed.testtype.IRemoteTest;
 import com.android.tradefed.util.AaptParser;
+import com.android.tradefed.util.RunUtil;
 import com.android.tradefed.util.proto.TfMetricProtoUtil;
 import java.io.File;
 import java.util.HashMap;
@@ -60,6 +61,10 @@
     )
     private boolean mUseDexMetadata = false;
 
+    @Option(name = "test-delay-between-installs",
+            description = "Delay in ms to wait for before starting the install test.")
+    private long mTestDelayBetweenInstalls = 5000;
+
     @Option(
         name = "test-dex-metadata-variant",
         description =
@@ -106,12 +111,9 @@
 
         // Delay test start time to give the background processes to finish.
         if (mTestStartDelay > 0) {
-            try {
-                Thread.sleep(mTestStartDelay);
-            } catch (InterruptedException e) {
-                CLog.e("Failed to delay test: %s", e.toString());
-            }
+            RunUtil.getDefault().sleep(mTestStartDelay);
         }
+
         Assert.assertFalse(mTestApkPath.isEmpty());
         File apkDir = new File(mTestApkPath);
         Assert.assertTrue(apkDir.isDirectory());
@@ -126,8 +128,11 @@
                 }
                 File file = new File(apkDir, fileName);
                 // Install app and measure time.
-                String installTime = Long.toString(installAndTime(file));
-                metrics.put(fileName, installTime);
+                long installTime = installAndTime(file);
+                if (installTime > 0) {
+                    metrics.put(fileName, Long.toString(installTime));
+                }
+                RunUtil.getDefault().sleep(mTestDelayBetweenInstalls);
             }
         } finally {
             reportMetrics(listener, mTestLabel, metrics);
@@ -142,11 +147,16 @@
      */
     long installAndTime(File packageFile) throws DeviceNotAvailableException {
         AaptParser parser = AaptParser.parse(packageFile);
+        if (parser == null) {
+            CLog.e("Failed to parse %s", packageFile);
+            return -1;
+        }
         String packageName = parser.getPackageName();
 
         String remotePath = "/data/local/tmp/" + packageFile.getName();
         if (!mDevice.pushFile(packageFile, remotePath)) {
-            throw new RuntimeException("Failed to push " + packageFile.getAbsolutePath());
+            CLog.e("Failed to push %s", packageFile);
+            return -1;
         }
 
         String dmRemotePath = null;
@@ -154,7 +164,8 @@
             File dexMetadataFile = getDexMetadataFile(packageFile);
             dmRemotePath = "/data/local/tmp/" + dexMetadataFile.getName();
             if (!mDevice.pushFile(dexMetadataFile, dmRemotePath)) {
-                throw new RuntimeException("Failed to push " + dexMetadataFile.getAbsolutePath());
+                CLog.e("Failed to push %s", dexMetadataFile);
+                return -1;
             }
         }
 
diff --git a/pylintrc b/pylintrc
index 3d0fc11..ba97286 100644
--- a/pylintrc
+++ b/pylintrc
@@ -15,6 +15,9 @@
 # Regular expression matching correct method names
 method-rgx=(([a-z][a-z0-9_]{2,50})|(_[a-z0-9_]*))$
 
+# Good variable names which should always be accepted, separated by a comma
+good-names=e, f, i, j
+
 [DESIGN]
 
 # Maximum number of return / yield for function / method body
diff --git a/src/com/android/tradefed/command/CommandScheduler.java b/src/com/android/tradefed/command/CommandScheduler.java
index 5c7e1c9..545c2f6 100644
--- a/src/com/android/tradefed/command/CommandScheduler.java
+++ b/src/com/android/tradefed/command/CommandScheduler.java
@@ -1481,12 +1481,16 @@
         // Check if device is not used in another invocation.
         throwIfDeviceInInvocationThread(context.getDevices());
 
-        CLog.d("starting invocation for command id %d", cmd.getCommandTracker().getId());
+        int invocationId = cmd.getCommandTracker().getId();
+        CLog.d("starting invocation for command id %d", invocationId);
         // Name invocation with first device serial
         final String invocationName = String.format("Invocation-%s",
                 context.getSerials().get(0));
         InvocationThread invocationThread = new InvocationThread(invocationName, context, cmd,
                 listeners);
+        // Link the context to its command by storing the invocation ID as an attribute
+        context.addInvocationAttribute(
+                IInvocationContext.INVOCATION_ID, Integer.toString(invocationId));
         logInvocationStartedEvent(cmd.getCommandTracker(), context);
         invocationThread.start();
         addInvocationThread(invocationThread);
diff --git a/src/com/android/tradefed/invoker/IInvocationContext.java b/src/com/android/tradefed/invoker/IInvocationContext.java
index f3b8c24..b4e89be 100644
--- a/src/com/android/tradefed/invoker/IInvocationContext.java
+++ b/src/com/android/tradefed/invoker/IInvocationContext.java
@@ -34,11 +34,17 @@
  */
 public interface IInvocationContext extends Serializable {
 
+    /** Key used for storing associated invocation ID. */
+    public static final String INVOCATION_ID = "invocation-id";
+
     public enum TimingEvent {
         FETCH_BUILD,
         SETUP;
     }
 
+    /** @return associated invocation ID or {@code null} if not linked to an invocation */
+    public String getInvocationId();
+
     /**
      * Return the number of devices allocated for the invocation.
      */
diff --git a/src/com/android/tradefed/invoker/InvocationContext.java b/src/com/android/tradefed/invoker/InvocationContext.java
index 8cdcae2..e147955 100644
--- a/src/com/android/tradefed/invoker/InvocationContext.java
+++ b/src/com/android/tradefed/invoker/InvocationContext.java
@@ -77,6 +77,12 @@
         mShardSerials = new LinkedHashMap<Integer, List<String>>();
     }
 
+    @Override
+    public String getInvocationId() {
+        List<String> values = mInvocationAttributes.get(INVOCATION_ID);
+        return values == null || values.isEmpty() ? null : values.get(0);
+    }
+
     /**
      * {@inheritDoc}
      */
diff --git a/src/com/android/tradefed/invoker/sandbox/ParentSandboxInvocationExecution.java b/src/com/android/tradefed/invoker/sandbox/ParentSandboxInvocationExecution.java
index e6df693..c0b8b8b 100644
--- a/src/com/android/tradefed/invoker/sandbox/ParentSandboxInvocationExecution.java
+++ b/src/com/android/tradefed/invoker/sandbox/ParentSandboxInvocationExecution.java
@@ -15,12 +15,19 @@
  */
 package com.android.tradefed.invoker.sandbox;
 
+import com.android.annotations.VisibleForTesting;
+import com.android.tradefed.config.Configuration;
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.ConfigurationFactory;
 import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.config.IConfigurationFactory;
 import com.android.tradefed.device.DeviceNotAvailableException;
 import com.android.tradefed.invoker.IInvocationContext;
 import com.android.tradefed.invoker.InvocationExecution;
+import com.android.tradefed.log.LogUtil.CLog;
 import com.android.tradefed.result.ITestInvocationListener;
 import com.android.tradefed.sandbox.SandboxInvocationRunner;
+import com.android.tradefed.sandbox.SandboxOptions;
 import com.android.tradefed.targetprep.BuildError;
 import com.android.tradefed.targetprep.TargetSetupError;
 
@@ -30,11 +37,19 @@
  */
 public class ParentSandboxInvocationExecution extends InvocationExecution {
 
+    private IConfiguration mParentPreparerConfig = null;
+
     @Override
     public void doSetup(
             IInvocationContext context, IConfiguration config, ITestInvocationListener listener)
             throws TargetSetupError, BuildError, DeviceNotAvailableException {
         // Skip
+        mParentPreparerConfig = getParentTargetConfig(config);
+        if (mParentPreparerConfig == null) {
+            return;
+        }
+        CLog.d("Using %s to run in the parent setup.", SandboxOptions.PARENT_PREPARER_CONFIG);
+        super.doSetup(context, mParentPreparerConfig, listener);
     }
 
     @Override
@@ -43,11 +58,22 @@
         // Skip
         // If we are the parent invocation of the sandbox, setUp has been skipped since it's
         // done in the sandbox, so tearDown should be skipped.
+        mParentPreparerConfig = getParentTargetConfig(config);
+        if (mParentPreparerConfig == null) {
+            return;
+        }
+        CLog.d("Using %s to run in the parent tear down.", SandboxOptions.PARENT_PREPARER_CONFIG);
+        super.doTeardown(context, mParentPreparerConfig, exception);
     }
 
     @Override
     public void doCleanUp(IInvocationContext context, IConfiguration config, Throwable exception) {
         // Skip
+        if (mParentPreparerConfig == null) {
+            return;
+        }
+        CLog.d("Using %s to run in the parent clean up.", SandboxOptions.PARENT_PREPARER_CONFIG);
+        super.doCleanUp(context, mParentPreparerConfig, exception);
     }
 
     @Override
@@ -57,4 +83,35 @@
         // If the invocation is sandboxed run as a sandbox instead.
         SandboxInvocationRunner.prepareAndRun(config, context, listener);
     }
+
+    /** Returns the {@link IConfigurationFactory} used to create configurations. */
+    @VisibleForTesting
+    protected IConfigurationFactory getFactory() {
+        return ConfigurationFactory.getInstance();
+    }
+
+    private IConfiguration getParentTargetConfig(IConfiguration config) throws TargetSetupError {
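+        // Return the cached configuration if it was already parsed; otherwise build it from the
+        // parent-preparer-config option.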
+        if (mParentPreparerConfig != null) {
+            return mParentPreparerConfig;
+        }
+        SandboxOptions options =
+                (SandboxOptions)
+                        config.getConfigurationObject(Configuration.SANBOX_OPTIONS_TYPE_NAME);
+        if (options != null && options.getParentPreparerConfig() != null) {
+            try {
+                return getFactory()
+                        .createConfigurationFromArgs(
+                                new String[] {options.getParentPreparerConfig()});
+            } catch (ConfigurationException e) {
+                String message =
+                        String.format(
+                                "Check your --%s option: %s",
+                                SandboxOptions.PARENT_PREPARER_CONFIG, e.getMessage());
+                CLog.e(message);
+                CLog.e(e);
+                throw new TargetSetupError(message, e, null);
+            }
+        }
+        return null;
+    }
 }
diff --git a/src/com/android/tradefed/postprocessor/AggregatePostProcessor.java b/src/com/android/tradefed/postprocessor/AggregatePostProcessor.java
new file mode 100644
index 0000000..f6746b3
--- /dev/null
+++ b/src/com/android/tradefed/postprocessor/AggregatePostProcessor.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.postprocessor;
+
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
+import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
+
+import com.google.common.collect.ListMultimap;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.DoubleSummaryStatistics;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * A metric aggregator that gives the min, max, mean, variance and standard deviation for numeric
+ * metrics collected during multiple-iteration test runs, treating them as doubles. Non-numeric
+ * metrics are ignored.
+ *
+ * <p>It parses metrics from a single string, as metrics are currently passed this way.
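+ *
+ * <p>For example, a numeric test metric "latency" reported as "1, 2, 3" across iterations is
+ * aggregated into "latency-min", "latency-max", "latency-mean", "latency-var" and
+ * "latency-stdev" entries, each formatted with two decimal places.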
+ */
+public class AggregatePostProcessor extends BasePostProcessor {
+    private static final String STATS_KEY_MIN = "min";
+    private static final String STATS_KEY_MAX = "max";
+    private static final String STATS_KEY_MEAN = "mean";
+    private static final String STATS_KEY_VAR = "var";
+    private static final String STATS_KEY_STDEV = "stdev";
+    // Separator for final upload
+    private static final String STATS_KEY_SEPARATOR = "-";
+
+    @Override
+    public Map<String, Metric.Builder> processRunMetrics(HashMap<String, Metric> rawMetrics) {
+        return new HashMap<String, Metric.Builder>();
+    }
+
+    @Override
+    public Map<String, Metric.Builder> processAllTestMetrics(
+            ListMultimap<String, Metric> allTestMetrics) {
+        // Aggregate final test metrics.
+        Map<String, Metric.Builder> aggregateMetrics = new HashMap<String, Metric.Builder>();
+        for (String key : allTestMetrics.keySet()) {
+            List<Metric> metrics = allTestMetrics.get(key);
+            List<Measurements> measures =
+                    metrics.stream().map(Metric::getMeasurements).collect(Collectors.toList());
+            // Parse metrics into a list of SingleString values, concatenating lists in the process
+            List<String> rawValues =
+                    measures.stream()
+                            .map(Measurements::getSingleString)
+                            .map(
+                                    m -> {
+                                        // Split results; also deals with the case of empty results
+                                        // in a certain run
+                                        List<String> splitVals = Arrays.asList(m.split(",", 0));
+                                        if (splitVals.size() == 1 && splitVals.get(0).isEmpty()) {
+                                            return Collections.<String>emptyList();
+                                        }
+                                        return splitVals;
+                                    })
+                            .flatMap(Collection::stream)
+                            .map(String::trim)
+                            .collect(Collectors.toList());
+            // Do not report empty metrics
+            if (rawValues.isEmpty()) {
+                continue;
+            }
+            boolean areAllDoubles =
+                    rawValues
+                            .stream()
+                            .allMatch(
+                                    val -> {
+                                        try {
+                                            Double.parseDouble(val);
+                                            return true;
+                                        } catch (NumberFormatException e) {
+                                            return false;
+                                        }
+                                    });
+            if (areAllDoubles) {
+                List<Double> values =
+                        rawValues.stream().map(Double::parseDouble).collect(Collectors.toList());
+                HashMap<String, Double> stats = getStats(values);
+                for (String statKey : stats.keySet()) {
+                    Metric.Builder metricBuilder = Metric.newBuilder();
+                    metricBuilder
+                            .getMeasurementsBuilder()
+                            .setSingleString(String.format("%2.2f", stats.get(statKey)));
+                    aggregateMetrics.put(
+                            String.join(STATS_KEY_SEPARATOR, key, statKey), metricBuilder);
+                }
+            } else {
+                CLog.i("Metric %s is not numeric", key);
+            }
+        }
+        // Ignore the passed-in run metrics.
+        return aggregateMetrics;
+    }
+
+    private HashMap<String, Double> getStats(Iterable<Double> values) {
+        HashMap<String, Double> stats = new HashMap<>();
+        DoubleSummaryStatistics summaryStats = new DoubleSummaryStatistics();
+        for (Double value : values) {
+            summaryStats.accept(value);
+        }
+        Double mean = summaryStats.getAverage();
+        Double count = Long.valueOf(summaryStats.getCount()).doubleValue();
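+        // Population variance: average squared deviation from the mean (divided by N, not N - 1).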
+        Double variance = (double) 0;
+        for (Double value : values) {
+            variance += Math.pow(value - mean, 2) / count;
+        }
+        stats.put(STATS_KEY_MIN, summaryStats.getMin());
+        stats.put(STATS_KEY_MAX, summaryStats.getMax());
+        stats.put(STATS_KEY_MEAN, mean);
+        stats.put(STATS_KEY_VAR, variance);
+        stats.put(STATS_KEY_STDEV, Math.sqrt(variance));
+        return stats;
+    }
+}
diff --git a/src/com/android/tradefed/result/TestRunResult.java b/src/com/android/tradefed/result/TestRunResult.java
index 195dad8..af9c6b2 100644
--- a/src/com/android/tradefed/result/TestRunResult.java
+++ b/src/com/android/tradefed/result/TestRunResult.java
@@ -206,7 +206,11 @@
         if (mExpectedTestCount == 0) {
             mExpectedTestCount = testCount;
         } else {
-            CLog.w("%s calls testRunStarted more than once", runName);
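+            // testRunStarted was called again for the same run (e.g. a runner-level retry);
+            // accumulate the expected count rather than overwriting it.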
+            CLog.w(
+                    "%s calls testRunStarted more than once. Previous expected count: %s. "
+                            + "New Expected count: %s",
+                    runName, mExpectedTestCount, mExpectedTestCount + testCount);
+            mExpectedTestCount += testCount;
         }
         mTestRunName = runName;
         mIsRunComplete = false;
diff --git a/src/com/android/tradefed/sandbox/SandboxOptions.java b/src/com/android/tradefed/sandbox/SandboxOptions.java
index 2ccb678..86e2895 100644
--- a/src/com/android/tradefed/sandbox/SandboxOptions.java
+++ b/src/com/android/tradefed/sandbox/SandboxOptions.java
@@ -28,6 +28,7 @@
     public static final String SANDBOX_BUILD_ID = "sandbox-build-id";
     public static final String USE_PROTO_REPORTER = "use-proto-reporter";
     public static final String CHILD_GLOBAL_CONFIG = "sub-global-config";
+    public static final String PARENT_PREPARER_CONFIG = "parent-preparer-config";
 
     @Option(
         name = TF_LOCATION,
@@ -56,6 +57,13 @@
                             + " sandbox.")
     private String mChildGlobalConfig = null;
 
+    @Option(
+        name = PARENT_PREPARER_CONFIG,
+        description =
+                "A configuration which target_preparers will be run in the parent of the sandbox."
+    )
+    private String mParentPreparerConfig = null;
+
     /**
      * Returns the provided directories containing the Trade Federation version to use for
      * sandboxing the run.
@@ -81,4 +89,9 @@
     public String getChildGlobalConfig() {
         return mChildGlobalConfig;
     }
+
+    /** Returns the configuration whose preparers should run in the parent process of the sandbox. */
+    public String getParentPreparerConfig() {
+        return mParentPreparerConfig;
+    }
 }
diff --git a/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java b/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
index 8b79420..ff4fcdd 100644
--- a/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
+++ b/src/com/android/tradefed/testtype/DeviceJUnit4ClassRunner.java
@@ -141,13 +141,13 @@
      *
      * &#064;Test
      * public void testFoo() {
-     *     metrics.put("key", "value");
-     *     metrics.put("key2", "value2");
+     *     metrics.addTestMetric("key", "value");
+     *     metrics.addTestMetric("key2", "value2");
      * }
      *
      * &#064;Test
      * public void testFoo2() {
-     *     metrics.put("key3", "value3");
+     *     metrics.addTestMetric("key3", "value3");
      * }
      * </pre>
      */
diff --git a/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java b/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
index c722798..fead3b3 100644
--- a/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
+++ b/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
@@ -118,12 +118,16 @@
     static final Pattern PATTERN_TEST_SKIPPED = Pattern.compile("skipped '.*");
     static final Pattern PATTERN_TEST_UNEXPECTED_SUCCESS = Pattern.compile("unexpected success");
 
-    static final Pattern PATTERN_ONE_LINE_RESULT = Pattern.compile(
-            "(\\S*) \\((\\S*)\\) ... (ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
+    static final Pattern PATTERN_ONE_LINE_RESULT =
+            Pattern.compile(
+                    "(\\S*) \\((\\S*)\\) \\.\\.\\. "
+                            + "(ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
     static final Pattern PATTERN_TWO_LINE_RESULT_FIRST = Pattern.compile(
             "(\\S*) \\((\\S*)\\)");
-    static final Pattern PATTERN_TWO_LINE_RESULT_SECOND = Pattern.compile(
-            "(.*) ... (ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
+    static final Pattern PATTERN_TWO_LINE_RESULT_SECOND =
+            Pattern.compile(
+                    "(.*) \\.\\.\\. "
+                            + "(ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
     static final Pattern PATTERN_FAIL_MESSAGE = Pattern.compile(
             "(FAIL|ERROR): (\\S*) \\((\\S*)\\)");
     static final Pattern PATTERN_RUN_SUMMARY = Pattern.compile(
@@ -131,10 +135,10 @@
 
     /** In case of error spanning over multiple lines. */
     static final Pattern MULTILINE_RESULT_WITH_WARNING =
-            Pattern.compile("(.*) ... (.*)", Pattern.DOTALL);
+            Pattern.compile("(.*) \\.\\.\\. (.*)", Pattern.DOTALL);
 
     static final Pattern MULTILINE_FINAL_RESULT_WITH_WARNING =
-            Pattern.compile("(.*) ... (.*)ok(.*)", Pattern.DOTALL);
+            Pattern.compile("(.*) \\.\\.\\. (.*)ok(.*)", Pattern.DOTALL);
 
     static final Pattern PATTERN_RUN_RESULT = Pattern.compile("(OK|FAILED).*");
 
diff --git a/src/com/android/tradefed/testtype/suite/ModuleListener.java b/src/com/android/tradefed/testtype/suite/ModuleListener.java
index 57fd858..f8b5b81 100644
--- a/src/com/android/tradefed/testtype/suite/ModuleListener.java
+++ b/src/com/android/tradefed/testtype/suite/ModuleListener.java
@@ -51,6 +51,12 @@
 
     @Override
     public void testRunStarted(String name, int numTests, int attemptNumber) {
+        // In case of a retry of the same run, do not add the expected count again. This handles
+        // the situation where a test runner with a built-in retry (like InstrumentationTest)
+        // calls testRunStarted several times, so the count is not inflated.
+        if (getTestRunAtAttempt(name, attemptNumber) != null) {
+            numTests = 0;
+        }
         super.testRunStarted(name, numTests, attemptNumber);
         if (attemptNumber != 0) {
             mTestsRan = 1;
diff --git a/tests/res/testtype/python_output2.txt b/tests/res/testtype/python_output2.txt
new file mode 100644
index 0000000..8cbf0ef
--- /dev/null
+++ b/tests/res/testtype/python_output2.txt
@@ -0,0 +1,220 @@
+No handlers could be found for logger "oauth2client.contrib.multistore_file"
+testAddData (public.report_test.ReportTest)
+test AddData. ... ok
+testAddError (public.report_test.ReportTest)
+test AddError. ... ok
+testSetStatus (public.report_test.ReportTest)
+test SetStatus. ... ok
+testCleanup (public.device_driver_test.DeviceDriverTest)
+Test Cleanup. ... ok
+testCreateAndroidVirtualDevices (public.device_driver_test.DeviceDriverTest)
+Test CreateAndroidVirtualDevices. ... ok
+testCreateAndroidVirtualDevicesInternalIP (public.device_driver_test.DeviceDriverTest)
+Test CreateAndroidVirtualDevices with internal IP. ... ok
+testDeleteAndroidVirtualDevices (public.device_driver_test.DeviceDriverTest)
+Test DeleteAndroidVirtualDevices. ... ok
+testLoadConfigFails (public.config_test.AcloudConfigManagerTest)
+Test loading a bad file. ... ok
+testLoadInternalConfig (public.config_test.AcloudConfigManagerTest)
+Test loading internal config. ... ok
+testLoadUserConfig (public.config_test.AcloudConfigManagerTest)
+Test loading user config. ... ok
+testLoadUserConfigLogic (public.config_test.AcloudConfigManagerTest)
+Test load user config logic. ... ok
+testOverrideWithHWProperty (public.config_test.AcloudConfigManagerTest)
+Test override hw property by flavor type. ... ok
+testCreateDevices (public.actions.create_cuttlefish_action_test.CreateCuttlefishActionTest)
+Test CreateDevices. ... ok
+testCreateDevices (public.actions.create_goldfish_action_test.CreateGoldfishActionTest)
+Tests CreateDevices. ... ok
+testCreateDevicesWithoutBuildId (public.actions.create_goldfish_action_test.CreateGoldfishActionTest)
+Test CreateDevices when emulator sys image build id is not provided. ... ok
+testCreateDevicesWithoutEmulatorBuildId (public.actions.create_goldfish_action_test.CreateGoldfishActionTest)
+Test CreateDevices when emulator build id is not provided. ... ok
+testCreateDevices (public.actions.common_operations_test.CommonOperationsTest)
+Test Create Devices. ... ok
+testCreateDevicesInternalIP (public.actions.common_operations_test.CommonOperationsTest)
+Test Create Devices and report internal IP. ... ok
+testDevicePoolCreateDevices (public.actions.common_operations_test.CommonOperationsTest)
+Test Device Pool Create Devices. ... ok
+testParseHWPropertyStr (create.create_common_test.CreateCommonTest)
+Test ParseHWPropertyArgs. ... ok
+testProcessHWPropertyWithInvalidArgs (create.create_common_test.CreateCommonTest)
+Test ParseHWPropertyArgs with invalid args. ... ok
+testGetBranchFromRepo (create.avd_spec_test.AvdSpecTest)
+Test get branch name from repo info. ... ok
+testGetBuildTarget (create.avd_spec_test.AvdSpecTest)
+Test get build target name. ... ok
+testParseHWPropertyStr (create.avd_spec_test.AvdSpecTest)
+Test _ParseHWPropertyStr. ... ok
+testProcessHWPropertyWithInvalidArgs (create.avd_spec_test.AvdSpecTest)
+Test _ProcessHWPropertyArgs with invalid args. ... ok
+testProcessImageArgs (create.avd_spec_test.AvdSpecTest)
+Test process image source. ... ok
+testProcessLocalImageArgs (create.avd_spec_test.AvdSpecTest)
+Test process args.local_image. ... ok
+testAddSshRsa (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AddSshRsa.. ... ok
+testAddSshRsaInvalidKey (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AddSshRsa.. ... ok
+testAttachAccelerator (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AttachAccelerator. ... ok
+testAttachDisk (internal.lib.gcompute_client_test.ComputeClientTest)
+Test AttachDisk. ... ok
+testBatchExecuteOnInstances (internal.lib.gcompute_client_test.ComputeClientTest)
+Test BatchExecuteOnInstances. ... ok
+testCheckImageExistsFalse (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CheckImageExists return False. ... ok
+testCheckImageExistsTrue (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CheckImageExists return True. ... ok
+testCompareMachineSizeBadMetric (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize with bad metric. ... ok
+testCompareMachineSizeEqual (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize where two machine sizes are equal. ... ok
+testCompareMachineSizeLarge (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize where the first one is larger. ... ok
+testCompareMachineSizeSmall (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CompareMachineSize where the first one is smaller. ... ok
+testCreateDiskWithNoSourceProject (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images with no set project. ... ok
+testCreateDiskWithProject (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images using a set project. ... ok
+testCreateDiskWithTypeSSD (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images using standard. ... ok
+testCreateDiskWithTypeStandard (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateDisk with images using standard. ... ok
+testCreateImageFail (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage fails. ... ok
+testCreateImageRaiseDriverErrorWithInvalidInput (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with valid input. ... ok
+testCreateImageRaiseDriverErrorWithValidInput (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with valid input. ... ok
+testCreateImageWithSourceDisk (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with src disk. ... ok
+testCreateImageWithSourceDiskAndLabel (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with src disk and label. ... ok
+testCreateImageWithSourceURI (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateImage with src uri. ... ok
+testCreateInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateInstance. ... ok
+testCreateInstanceWithGpu (internal.lib.gcompute_client_test.ComputeClientTest)
+Test CreateInstance with a GPU parameter not set to None. ... ok
+testDeleteDisks (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteDisks. ... ok
+testDeleteImage (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteImage. ... ok
+testDeleteImages (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteImages. ... ok
+testDeleteInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteInstance. ... ok
+testDeleteInstances (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DeleteInstances. ... ok
+testDetachDisk (internal.lib.gcompute_client_test.ComputeClientTest)
+Test DetachDisk. ... ok
+testGetImage (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetImage. ... ok
+testGetImageOther (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetImage with other project. ... ok
+testGetInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetInstance. ... ok
+testGetInstanceNamesByIPs (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetInstanceNamesByIPs. ... ok
+testGetMachineType (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetMachineType. ... ok
+testGetOperationStatusError (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus failed. ... ok
+testGetOperationStatusGlobal (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus for global. ... ok
+testGetOperationStatusRegion (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus for region. ... ok
+testGetOperationStatusZone (internal.lib.gcompute_client_test.ComputeClientTest)
+Test _GetOperationStatus for zone. ... ok
+testGetSerialPortOutput (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetSerialPortOutput. ... ok
+testGetSerialPortOutputFail (internal.lib.gcompute_client_test.ComputeClientTest)
+Test GetSerialPortOutputFail. ... ok
+testListImages (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ListImages. ... ok
+testListImagesFromExternalProject (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ListImages which accepts different project. ... ok
+testListInstances (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ListInstances. ... ok
+testResetInstance (internal.lib.gcompute_client_test.ComputeClientTest)
+Test ResetInstance. ... ok
+testRetryOnFingerPrintError (internal.lib.gcompute_client_test.ComputeClientTest)
+Test RetryOnFingerPrintError. ... ok
+testSetImageLabel (internal.lib.gcompute_client_test.ComputeClientTest)
+Test SetImageLabel. ... ok
+testWaitOnOperation (internal.lib.gcompute_client_test.ComputeClientTest)
+Test WaitOnOperation. ... ok
+testCreateInstance (internal.lib.cvd_compute_client_test.CvdComputeClientTest)
+Test CreateInstance. ... ok
+testCreatePublicKeyAreCreated (internal.lib.utils_test.UtilsTest)
+Test when the PublicKey created. ... ok
+testCreateSshKeyPairKeyAlreadyExists (internal.lib.utils_test.UtilsTest)
+Test when the key pair already exists. ... ok
+testCreateSshKeyPairKeyAreCreated (internal.lib.utils_test.UtilsTest)
+Test when the key pair created. ... ok
+testRetry (internal.lib.utils_test.UtilsTest)
+Test Retry. ... ok
+testRetryExceptionType (internal.lib.utils_test.UtilsTest)
+Test RetryExceptionType function. ... ok
+testTempDirOrininalErrorRaised (internal.lib.utils_test.UtilsTest)
+Test original error is raised even if tmp dir deletion failed. ... ok
+testTempDirWhenDeleteEncounterError (internal.lib.utils_test.UtilsTest)
+Test create a temp dir and encoutered error during deletion. ... ok
+testTempDirWhenDeleteTempDirNoLongerExist (internal.lib.utils_test.UtilsTest)
+Test create a temp dir and dir no longer exists during deletion. ... ok
+testBatchExecute (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test BatchExecute. ... ok
+testExecuteWithRetry (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test Execute is called and retries are triggered. ... ok
+testInitResourceHandle (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test InitResourceHandle. ... ok
+testListWithMultiPages (internal.lib.base_cloud_client_test.BaseCloudApiClientTest)
+Test ListWithMultiPages. ... ok
+testCopyTo (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test CopyTo. ... ok
+testCopyToWithRetry (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test CopyTo with retry. ... ok
+testDownloadArtifact (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test DownloadArtifact. ... ok
+testDownloadArtifactOSError (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test DownloadArtifact when OSError is raised. ... ok
+testGetBranch (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test GetBuild. ... ok
+testGetLKGB (internal.lib.android_build_client_test.AndroidBuildClientTest)
+Test GetLKGB. ... ok
+testCheckBoot (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CheckBoot. ... ok
+testCheckMachineSizeDoesNotMeetRequirement (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CheckMachineSize when machine size does not meet requirement. ... ok
+testCheckMachineSizeMeetsRequirement (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CheckMachineSize when machine size meets requirement. ... ok
+testCreateImage (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CreateImage. ... ok
+testCreateInstance (internal.lib.android_compute_client_test.AndroidComputeClientTest)
+Test CreateInstance. ... ok
+testCreateInstance (internal.lib.goldfish_compute_client_test.GoldfishComputeClientTest)
+Test CreateInstance. ... ok
+testDelete (internal.lib.gstorage_client_test.StorageClientTest)
+Test Delete. ... ok
+testDeleteMultipleFiles (internal.lib.gstorage_client_test.StorageClientTest)
+Test Delete multiple files. ... ok
+testGet (internal.lib.gstorage_client_test.StorageClientTest)
+Test Get. ... ok
+testGetUrl (internal.lib.gstorage_client_test.StorageClientTest)
+Test GetUrl. ... ok
+testGetUrlNotFound (internal.lib.gstorage_client_test.StorageClientTest)
+Test GetUrl when object is not found. ... ok
+testList (internal.lib.gstorage_client_test.StorageClientTest)
+Test List. ... ok
+testUpload (internal.lib.gstorage_client_test.StorageClientTest)
+Test Upload. ... ok
+testUploadOSError (internal.lib.gstorage_client_test.StorageClientTest)
+Test Upload when OSError is raised. ... ok
+
+----------------------------------------------------------------------
+Ran 107 tests in 0.295s
+
+OK
diff --git a/tests/src/com/android/tradefed/UnitTests.java b/tests/src/com/android/tradefed/UnitTests.java
index d85e024..d70b820 100644
--- a/tests/src/com/android/tradefed/UnitTests.java
+++ b/tests/src/com/android/tradefed/UnitTests.java
@@ -94,6 +94,7 @@
 import com.android.tradefed.invoker.ShardMasterResultForwarderTest;
 import com.android.tradefed.invoker.TestInvocationMultiTest;
 import com.android.tradefed.invoker.TestInvocationTest;
+import com.android.tradefed.invoker.sandbox.ParentSandboxInvocationExecutionTest;
 import com.android.tradefed.invoker.shard.ShardHelperTest;
 import com.android.tradefed.invoker.shard.StrictShardHelperTest;
 import com.android.tradefed.invoker.shard.TestsPoolPollerTest;
@@ -101,6 +102,7 @@
 import com.android.tradefed.log.HistoryLoggerTest;
 import com.android.tradefed.log.LogRegistryTest;
 import com.android.tradefed.log.TerribleFailureEmailHandlerTest;
+import com.android.tradefed.postprocessor.AggregatePostProcessorTest;
 import com.android.tradefed.postprocessor.AveragePostProcessorTest;
 import com.android.tradefed.postprocessor.BasePostProcessorTest;
 import com.android.tradefed.result.BugreportCollectorTest;
@@ -419,6 +421,9 @@
     StrictShardHelperTest.class,
     TestsPoolPollerTest.class,
 
+    // invoker.sandbox
+    ParentSandboxInvocationExecutionTest.class,
+
     // log
     FileLoggerTest.class,
     HistoryLoggerTest.class,
@@ -426,6 +431,7 @@
     TerribleFailureEmailHandlerTest.class,
 
     // postprocessor
+    AggregatePostProcessorTest.class,
     AveragePostProcessorTest.class,
     BasePostProcessorTest.class,
 
diff --git a/tests/src/com/android/tradefed/command/CommandSchedulerTest.java b/tests/src/com/android/tradefed/command/CommandSchedulerTest.java
index 655b32d..5f54fa1 100644
--- a/tests/src/com/android/tradefed/command/CommandSchedulerTest.java
+++ b/tests/src/com/android/tradefed/command/CommandSchedulerTest.java
@@ -1143,6 +1143,9 @@
         mScheduler.shutdownOnEmpty();
         mScheduler.join(2 * 1000);
         verifyMocks(mockListener);
-        assertTrue(mContext.getAttributes().isEmpty());
+
+        // only attribute is invocation ID
+        assertEquals(1, mContext.getAttributes().size());
+        assertNotNull(mContext.getInvocationId());
     }
 }
diff --git a/tests/src/com/android/tradefed/invoker/InvocationContextTest.java b/tests/src/com/android/tradefed/invoker/InvocationContextTest.java
index eef159c..af73f91 100644
--- a/tests/src/com/android/tradefed/invoker/InvocationContextTest.java
+++ b/tests/src/com/android/tradefed/invoker/InvocationContextTest.java
@@ -52,6 +52,17 @@
         mContext = new InvocationContext();
     }
 
+    /** Test setting and getting invocation ID. */
+    @Test
+    public void testGetInvocationID() {
+        // initially null
+        assertNull(mContext.getInvocationId());
+
+        // non-null after adding the ID as an attribute
+        mContext.addInvocationAttribute(IInvocationContext.INVOCATION_ID, "TEST_ID");
+        assertEquals("TEST_ID", mContext.getInvocationId());
+    }
+
     /** Test the reverse look up of the device name in the configuration for an ITestDevice */
     @Test
     public void testGetDeviceName() {
diff --git a/tests/src/com/android/tradefed/invoker/sandbox/ParentSandboxInvocationExecutionTest.java b/tests/src/com/android/tradefed/invoker/sandbox/ParentSandboxInvocationExecutionTest.java
new file mode 100644
index 0000000..a580a83
--- /dev/null
+++ b/tests/src/com/android/tradefed/invoker/sandbox/ParentSandboxInvocationExecutionTest.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.invoker.sandbox;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import com.android.tradefed.build.BuildInfo;
+import com.android.tradefed.config.Configuration;
+import com.android.tradefed.config.ConfigurationDef;
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.config.IConfigurationFactory;
+import com.android.tradefed.config.OptionSetter;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.device.StubDevice;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.invoker.InvocationContext;
+import com.android.tradefed.sandbox.SandboxOptions;
+import com.android.tradefed.targetprep.ITargetCleaner;
+import com.android.tradefed.targetprep.TargetSetupError;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mockito;
+
+/** Unit tests for {@link ParentSandboxInvocationExecution}. */
+@RunWith(JUnit4.class)
+public class ParentSandboxInvocationExecutionTest {
+
+    private ParentSandboxInvocationExecution mParentSandbox;
+    private IConfiguration mConfig;
+    private IInvocationContext mContext;
+    private IConfigurationFactory mMockFactory;
+    private SandboxOptions mOptions;
+    private ITargetCleaner mMockPreparer;
+    private ITestDevice mMockDevice;
+
+    @Before
+    public void setUp() {
+        mMockFactory = Mockito.mock(IConfigurationFactory.class);
+        mMockPreparer = Mockito.mock(ITargetCleaner.class);
+        mMockDevice = Mockito.mock(ITestDevice.class);
+
+        doReturn(new StubDevice("serial")).when(mMockDevice).getIDevice();
+
+        mParentSandbox =
+                new ParentSandboxInvocationExecution() {
+                    @Override
+                    protected IConfigurationFactory getFactory() {
+                        return mMockFactory;
+                    }
+                };
+        mContext = new InvocationContext();
+        mContext.addAllocatedDevice(ConfigurationDef.DEFAULT_DEVICE_NAME, mMockDevice);
+        mContext.addDeviceBuildInfo(ConfigurationDef.DEFAULT_DEVICE_NAME, new BuildInfo());
+        mConfig = new Configuration("test", "test");
+        mOptions = new SandboxOptions();
+    }
+
+    @Test
+    public void testDefaultSkipSetup_tearDown() throws Throwable {
+        mParentSandbox.doSetup(mContext, mConfig, null);
+        mParentSandbox.doTeardown(mContext, mConfig, null);
+        mParentSandbox.doCleanUp(mContext, mConfig, null);
+
+        verify(mMockFactory, times(0)).createConfigurationFromArgs(Mockito.any());
+    }
+
+    @Test
+    public void testParentConfig() throws Throwable {
+        mConfig.setConfigurationObject(Configuration.SANBOX_OPTIONS_TYPE_NAME, mOptions);
+        OptionSetter setter = new OptionSetter(mOptions);
+        setter.setOptionValue(SandboxOptions.PARENT_PREPARER_CONFIG, "parent-config");
+
+        IConfiguration configParent = new Configuration("test1", "test1");
+        configParent.setTargetPreparer(mMockPreparer);
+        doReturn(configParent)
+                .when(mMockFactory)
+                .createConfigurationFromArgs(new String[] {"parent-config"});
+
+        mParentSandbox.doSetup(mContext, mConfig, null);
+        mParentSandbox.doTeardown(mContext, mConfig, null);
+        mParentSandbox.doCleanUp(mContext, mConfig, null);
+
+        verify(mMockFactory, times(1)).createConfigurationFromArgs(Mockito.any());
+        verify(mMockPreparer, times(1)).setUp(Mockito.any(), Mockito.any());
+        verify(mMockPreparer, times(1)).tearDown(Mockito.any(), Mockito.any(), Mockito.any());
+    }
+
+    @Test
+    public void testParentConfig_exception() throws Throwable {
+        mConfig.setConfigurationObject(Configuration.SANBOX_OPTIONS_TYPE_NAME, mOptions);
+        OptionSetter setter = new OptionSetter(mOptions);
+        setter.setOptionValue(SandboxOptions.PARENT_PREPARER_CONFIG, "parent-config");
+        doThrow(new ConfigurationException("test error"))
+                .when(mMockFactory)
+                .createConfigurationFromArgs(new String[] {"parent-config"});
+
+        try {
+            mParentSandbox.doSetup(mContext, mConfig, null);
+            fail("Should have thrown an exception.");
+        } catch (TargetSetupError expected) {
+            // Expected.
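+            // The trailing "null" presumably comes from the device descriptor (not set
+            // here) being appended to the TargetSetupError message.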
+            assertEquals(
+                    "Check your --parent-preparer-config option: test error null",
+                    expected.getMessage());
+        }
+    }
+}
diff --git a/tests/src/com/android/tradefed/postprocessor/AggregatePostProcessorTest.java b/tests/src/com/android/tradefed/postprocessor/AggregatePostProcessorTest.java
new file mode 100644
index 0000000..d271328
--- /dev/null
+++ b/tests/src/com/android/tradefed/postprocessor/AggregatePostProcessorTest.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.postprocessor;
+
+import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ListMultimap;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.util.Map;
+
+/** Unit tests for {@link AggregatePostProcessor}. */
+@RunWith(JUnit4.class)
+public class AggregatePostProcessorTest {
+
+    private static final String TEST_CLASS = "test.class";
+    private static final String TEST_NAME = "test.name";
+
+    private static final int TEST_ITERATIONS = 3;
+
+    // Upload key suffixes for each aggregate metric
+    private static final String STATS_KEY_MIN = "min";
+    private static final String STATS_KEY_MAX = "max";
+    private static final String STATS_KEY_MEAN = "mean";
+    private static final String STATS_KEY_VAR = "var";
+    private static final String STATS_KEY_STDEV = "stdev";
+    // Separator for final upload
+    private static final String STATS_KEY_SEPARATOR = "-";
+
+    private AggregatePostProcessor mCollector;
+
+    @Before
+    public void setUp() {
+        mCollector = new AggregatePostProcessor();
+    }
+
+    /** Test correct aggregation of singular double metrics. */
+    @Test
+    public void testSingularDoubleMetric() {
+        // Singular double metrics test: Sample results and expected aggregate metric values.
+        final String singularDoubleKey = "singular_double";
+        final ImmutableList<String> singularDoubleMetrics = ImmutableList.of("1.1", "2", "2.9");
+        final ImmutableMap<String, String> singularDoubleStats =
+                ImmutableMap.of(
+                        STATS_KEY_MIN, "1.10",
+                        STATS_KEY_MAX, "2.90",
+                        STATS_KEY_MEAN, "2.00",
+                        STATS_KEY_VAR, "0.54",
+                        STATS_KEY_STDEV, "0.73");
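+        // Sanity check on the expected values: over {1.1, 2, 2.9} the mean is 2.00, the
+        // population variance is (0.81 + 0 + 0.81) / 3 = 0.54, and the stdev is ~0.73.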
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (int i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString(singularDoubleMetrics.get(i));
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(singularDoubleKey, currentTestMetric);
+        }
+
+        // Test that the correct aggregate metrics are returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MIN)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_MIN),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MIN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MAX)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_MAX),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MAX))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MEAN)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_MEAN),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_MEAN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_VAR)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_VAR),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_VAR))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_STDEV)));
+        Assert.assertEquals(
+                singularDoubleStats.get(STATS_KEY_STDEV),
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singularDoubleKey, STATS_KEY_STDEV))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+    }
+
+    /** Test correct aggregation of list double metrics. */
+    @Test
+    public void testListDoubleMetric() {
+        // List double metrics test: Sample results and expected aggregate metric values.
+        final String listDoubleKey = "list_double";
+        final ImmutableList<String> listDoubleMetrics =
+                ImmutableList.of("1.1, 2.2", "", "1.5, 2.5, 1.9, 2.9");
+        final ImmutableMap<String, String> listDoubleStats =
+                ImmutableMap.of(
+                        STATS_KEY_MIN, "1.10",
+                        STATS_KEY_MAX, "2.90",
+                        STATS_KEY_MEAN, "2.02",
+                        STATS_KEY_VAR, "0.36",
+                        STATS_KEY_STDEV, "0.60");
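+        // The empty second iteration contributes nothing; the six remaining values
+        // (1.1, 2.2, 1.5, 2.5, 1.9, 2.9) are pooled, giving a mean of ~2.02 and a
+        // population variance of ~0.36.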
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (int i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString(listDoubleMetrics.get(i));
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(listDoubleKey, currentTestMetric);
+        }
+
+        // Test that the correct aggregate metrics are returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, listDoubleKey, STATS_KEY_MIN)));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_MIN),
+                aggregateMetrics
+                        .get(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_MIN)
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_MAX));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_MAX),
+                aggregateMetrics
+                        .get(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_MAX)
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_MEAN));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_MEAN),
+                aggregateMetrics
+                        .get(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_MEAN)
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_VAR));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_VAR),
+                aggregateMetrics
+                        .get(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_VAR)
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_STDEV));
+        Assert.assertEquals(
+                listDoubleStats.get(STATS_KEY_STDEV),
+                aggregateMetrics
+                        .get(listDoubleKey + STATS_KEY_SEPARATOR + STATS_KEY_STDEV)
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+    }
+
+    /** Test that a non-numeric metric does not show up in the reported results. */
+    @Test
+    public void testNonNumericMetric() {
+        // Non-numeric metrics test: sample results that should not appear in aggregate metrics.
+        final String nonNumericKey = "non_numeric";
+        final ImmutableList<String> nonNumericMetrics = ImmutableList.of("1", "success", "failed");
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (int i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString(nonNumericMetrics.get(i));
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(nonNumericKey, currentTestMetric);
+        }
+
+        // Test that non-numeric metrics do not get returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_MIN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_MAX)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_MEAN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_VAR)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, nonNumericKey, STATS_KEY_STDEV)));
+    }
+
+    /** Test empty result. */
+    @Test
+    public void testEmptyResult() {
+        final String emptyResultKey = "empty_result";
+
+        // Construct ListMultimap of multiple iterations of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        for (int i = 0; i < TEST_ITERATIONS; i++) {
+            Metric.Builder metricBuilder = Metric.newBuilder();
+            metricBuilder.getMeasurementsBuilder().setSingleString("");
+            Metric currentTestMetric = metricBuilder.build();
+            allTestMetrics.put(emptyResultKey, currentTestMetric);
+        }
+
+        // Test that tests with empty results do not get returned.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_MIN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_MAX)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_MEAN)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_VAR)));
+        Assert.assertFalse(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, emptyResultKey, STATS_KEY_STDEV)));
+    }
+
+    /** Test single run. */
+    @Test
+    public void testSingleRun() {
+        final String singleRunKey = "single_run";
+        final String singleRunVal = "1.00";
+        final String zeroStr = "0.00";
+
+        // Construct ListMultimap of a single iteration of test metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        Metric.Builder metricBuilder = Metric.newBuilder();
+        metricBuilder.getMeasurementsBuilder().setSingleString(singleRunVal);
+        Metric currentTestMetric = metricBuilder.build();
+        allTestMetrics.put(singleRunKey, currentTestMetric);
+
+        // Test that single runs still give the correct aggregate metrics.
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MIN)));
+        Assert.assertEquals(
+                singleRunVal,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MIN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MAX)));
+        Assert.assertEquals(
+                singleRunVal,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MAX))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MEAN)));
+        Assert.assertEquals(
+                singleRunVal,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_MEAN))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_VAR)));
+        Assert.assertEquals(
+                zeroStr,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_VAR))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+        Assert.assertTrue(
+                aggregateMetrics.containsKey(
+                        String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_STDEV)));
+        Assert.assertEquals(
+                zeroStr,
+                aggregateMetrics
+                        .get(String.join(STATS_KEY_SEPARATOR, singleRunKey, STATS_KEY_STDEV))
+                        .build()
+                        .getMeasurements()
+                        .getSingleString());
+    }
+
+    /** Test zero runs. */
+    @Test
+    public void testZeroRun() {
+        // Test that tests with zero runs do not get added to the processed metrics.
+        ListMultimap<String, Metric> allTestMetrics = ArrayListMultimap.create();
+        Map<String, Metric.Builder> aggregateMetrics =
+                mCollector.processAllTestMetrics(allTestMetrics);
+
+        Assert.assertEquals(0, aggregateMetrics.keySet().size());
+    }
+}
diff --git a/tests/src/com/android/tradefed/result/CollectingTestListenerTest.java b/tests/src/com/android/tradefed/result/CollectingTestListenerTest.java
index 11be3bc..d3457f3 100644
--- a/tests/src/com/android/tradefed/result/CollectingTestListenerTest.java
+++ b/tests/src/com/android/tradefed/result/CollectingTestListenerTest.java
@@ -286,6 +286,54 @@
         assertThat(res.getName()).isEqualTo("not started");
     }
 
+    /** Test the listener under a single normal test run that gets sharded. */
+    @Test
+    public void testSingleRun_multi() {
+        mCollectingTestListener.testRunStarted("run1", 1);
+        final TestDescription test = new TestDescription("FooTest", "testName1");
+        mCollectingTestListener.testStarted(test);
+        mCollectingTestListener.testEnded(test, new HashMap<String, Metric>());
+        mCollectingTestListener.testRunEnded(0, new HashMap<String, Metric>());
+
+        mCollectingTestListener.testRunStarted("run1", 3);
+        final TestDescription test2 = new TestDescription("FooTest", "testName2");
+        mCollectingTestListener.testStarted(test2);
+        mCollectingTestListener.testEnded(test2, new HashMap<String, Metric>());
+        mCollectingTestListener.testRunFailed("missing tests");
+        mCollectingTestListener.testRunEnded(0, new HashMap<String, Metric>());
+
+        TestRunResult runResult = mCollectingTestListener.getCurrentRunResults();
+
+        assertThat(runResult.isRunComplete()).isTrue();
+        assertThat(runResult.isRunFailure()).isTrue();
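+        // Two tests actually ran, but the expected count sums the declared run sizes (1 + 3).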
+        assertThat(mCollectingTestListener.getNumTotalTests()).isEqualTo(2);
+        assertThat(mCollectingTestListener.getExpectedTests()).isEqualTo(4);
+    }
+
+    /** Test the listener under a single sharded test run where the failing attempt comes first. */
+    @Test
+    public void testSingleRun_multi_failureRunFirst() {
+        mCollectingTestListener.testRunStarted("run1", 3);
+        final TestDescription test2 = new TestDescription("FooTest", "testName2");
+        mCollectingTestListener.testStarted(test2);
+        mCollectingTestListener.testEnded(test2, new HashMap<String, Metric>());
+        mCollectingTestListener.testRunFailed("missing tests");
+        mCollectingTestListener.testRunEnded(0, new HashMap<String, Metric>());
+
+        mCollectingTestListener.testRunStarted("run1", 1);
+        final TestDescription test = new TestDescription("FooTest", "testName1");
+        mCollectingTestListener.testStarted(test);
+        mCollectingTestListener.testEnded(test, new HashMap<String, Metric>());
+        mCollectingTestListener.testRunEnded(0, new HashMap<String, Metric>());
+
+        TestRunResult runResult = mCollectingTestListener.getCurrentRunResults();
+
+        assertThat(runResult.isRunComplete()).isTrue();
+        assertThat(runResult.isRunFailure()).isTrue();
+        assertThat(mCollectingTestListener.getNumTotalTests()).isEqualTo(2);
+        assertThat(mCollectingTestListener.getExpectedTests()).isEqualTo(4);
+    }
+
     /**
      * Injects a single test run with 1 passed test into the {@link CollectingTestListener} under
      * test
diff --git a/tests/src/com/android/tradefed/result/TestRunResultTest.java b/tests/src/com/android/tradefed/result/TestRunResultTest.java
index 523932e..29617e3 100644
--- a/tests/src/com/android/tradefed/result/TestRunResultTest.java
+++ b/tests/src/com/android/tradefed/result/TestRunResultTest.java
@@ -149,7 +149,7 @@
         result.testEnded(test2, new HashMap<String, Metric>());
         result.testRunEnded(0, new HashMap<String, Metric>());
         // Verify rerun.
-        assertEquals(2, result.getExpectedTestCount());
+        assertEquals(3, result.getExpectedTestCount());
         assertTrue(result.isRunFailure());
         assertEquals("failure", result.getRunFailureMessage());
         assertTrue(result.isRunComplete());
diff --git a/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java b/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
index 82a56d4..34edfbe 100644
--- a/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
+++ b/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
@@ -52,6 +52,7 @@
 public class PythonUnitTestResultParserTest {
 
     public static final String PYTHON_OUTPUT_FILE_1 = "python_output1.txt";
+    public static final String PYTHON_OUTPUT_FILE_2 = "python_output2.txt";
 
     private PythonUnitTestResultParser mParser;
     private ITestInvocationListener mMockListener;
@@ -518,6 +519,21 @@
         expectLastCall().times(1);
     }
 
+    /** Test another output that starts with a warning. */
+    @Test
+    public void testParseRealOutput2() {
+        String[] contents = readInFile(PYTHON_OUTPUT_FILE_2);
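+        // The python_output2.txt fixture is expected to report 107 test cases over 295 ms.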
+        mMockListener.testRunStarted("test", 107);
+        for (int i = 0; i < 107; i++) {
+            mMockListener.testStarted(EasyMock.anyObject());
+            mMockListener.testEnded(EasyMock.anyObject(), (HashMap<String, Metric>) anyObject());
+        }
+        mMockListener.testRunEnded(295, new HashMap<String, Metric>());
+        replay(mMockListener);
+        mParser.processNewLines(contents);
+        verify(mMockListener);
+    }
+
     private void setTestIdChecks(TestDescription[] ids, boolean[] didPass) {
         for (int i = 0; i < ids.length; i++) {
             mMockListener.testStarted(ids[i]);
diff --git a/tests/src/com/android/tradefed/testtype/suite/ModuleListenerTest.java b/tests/src/com/android/tradefed/testtype/suite/ModuleListenerTest.java
index 27d292f..7e1e170 100644
--- a/tests/src/com/android/tradefed/testtype/suite/ModuleListenerTest.java
+++ b/tests/src/com/android/tradefed/testtype/suite/ModuleListenerTest.java
@@ -23,12 +23,14 @@
 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
 import com.android.tradefed.result.ITestInvocationListener;
 import com.android.tradefed.result.TestDescription;
-import java.util.HashMap;
+
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
+import java.util.HashMap;
+
 /** Unit tests for {@link ModuleListener} * */
 @RunWith(JUnit4.class)
 public class ModuleListenerTest {
@@ -99,6 +101,8 @@
 
         assertEquals(numTests, mListener.getNumTotalTests());
         assertEquals(numTests, mListener.getNumTestsInState(TestStatus.PASSED));
+        // Expected count stays as 5
+        assertEquals(numTests, mListener.getExpectedTests());
     }
 
     /** Some test runner calls testRunStart several times. We need to count all their tests. */